var/home/core/zuul-output/logs/kubelet.log
Oct 02 21:23:27 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 02 21:23:27 crc restorecon[4559]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 02 21:23:27 crc restorecon[4559]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc 
restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 21:23:27 crc 
restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:27 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:27 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 
21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 21:23:28 crc 
restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 
21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 02 21:23:28 crc restorecon[4559]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 
21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc 
restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 21:23:28 crc restorecon[4559]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 02 21:23:29 crc kubenswrapper[4636]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 02 21:23:29 crc kubenswrapper[4636]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 02 21:23:29 crc kubenswrapper[4636]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 02 21:23:29 crc kubenswrapper[4636]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 02 21:23:29 crc kubenswrapper[4636]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 02 21:23:29 crc kubenswrapper[4636]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.149440 4636 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160449 4636 feature_gate.go:330] unrecognized feature gate: Example Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160509 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160524 4636 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160537 4636 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160550 4636 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160562 4636 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160572 4636 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160583 4636 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160593 4636 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160603 4636 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160613 4636 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160624 4636 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160634 4636 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160644 4636 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160655 4636 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160665 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160680 4636 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160692 4636 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160703 4636 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160714 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160724 4636 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160735 4636 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160745 4636 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160792 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160802 4636 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160813 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160823 4636 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160832 4636 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160840 4636 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160848 4636 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160858 4636 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160866 4636 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160876 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160885 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160894 4636 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160906 4636 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160922 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160938 4636 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160952 4636 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160966 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160978 4636 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.160989 4636 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161000 4636 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161011 4636 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161021 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161031 4636 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161041 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161051 4636 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161063 4636 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161073 4636 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161083 4636 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161094 4636 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161105 4636 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161115 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161125 4636 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161136 4636 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161147 4636 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161164 4636 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
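The "unrecognized feature gate" warnings above and below are OpenShift-level gate names that the embedded Kubernetes feature-gate parser does not know; the same list recurs several times in this startup because the gate set is parsed more than once. A small sketch that collapses the repetition into one sorted list of unique gate names; the "kubelet.log" input path is an assumption, the message shape is taken verbatim from these records.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

// Matches records like: feature_gate.go:330] unrecognized feature gate: GatewayAPI
var unrecognized = regexp.MustCompile(`unrecognized feature gate: ([A-Za-z0-9]+)`)

func main() {
	f, err := os.Open("kubelet.log") // assumed: a saved copy of this journal
	if err != nil {
		panic(err)
	}
	defer f.Close()
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024)
	names := map[string]bool{}
	for sc.Scan() {
		for _, m := range unrecognized.FindAllStringSubmatch(sc.Text(), -1) {
			names[m[1]] = true
		}
	}
	uniq := make([]string, 0, len(names))
	for n := range names {
		uniq = append(uniq, n)
	}
	sort.Strings(uniq)
	fmt.Printf("%d unique unrecognized gates:\n", len(uniq))
	for _, n := range uniq {
		fmt.Println("  " + n)
	}
}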
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161180 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161193 4636 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161203 4636 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161214 4636 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161226 4636 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161237 4636 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161247 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161257 4636 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161267 4636 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161276 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161285 4636 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161293 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.161300 4636 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161474 4636 flags.go:64] FLAG: --address="0.0.0.0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161494 4636 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161511 4636 flags.go:64] FLAG: --anonymous-auth="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161524 4636 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161536 4636 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161545 4636 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161557 4636 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161569 4636 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161578 4636 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161588 4636 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161598 4636 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161610 4636 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161620 4636 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161630 4636 flags.go:64] FLAG: --cgroup-root="" Oct 02 21:23:29 crc 
kubenswrapper[4636]: I1002 21:23:29.161639 4636 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161648 4636 flags.go:64] FLAG: --client-ca-file="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161657 4636 flags.go:64] FLAG: --cloud-config="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161668 4636 flags.go:64] FLAG: --cloud-provider="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161677 4636 flags.go:64] FLAG: --cluster-dns="[]" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161689 4636 flags.go:64] FLAG: --cluster-domain="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161698 4636 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161707 4636 flags.go:64] FLAG: --config-dir="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161717 4636 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161727 4636 flags.go:64] FLAG: --container-log-max-files="5" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161739 4636 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161782 4636 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161797 4636 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161811 4636 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161824 4636 flags.go:64] FLAG: --contention-profiling="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161836 4636 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161848 4636 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161863 4636 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161875 4636 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161891 4636 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161904 4636 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161916 4636 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161928 4636 flags.go:64] FLAG: --enable-load-reader="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161940 4636 flags.go:64] FLAG: --enable-server="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161952 4636 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161973 4636 flags.go:64] FLAG: --event-burst="100" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161986 4636 flags.go:64] FLAG: --event-qps="50" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.161999 4636 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162011 4636 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162023 4636 flags.go:64] FLAG: --eviction-hard="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 
21:23:29.162038 4636 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162050 4636 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162062 4636 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162077 4636 flags.go:64] FLAG: --eviction-soft="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162089 4636 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162102 4636 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162114 4636 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162126 4636 flags.go:64] FLAG: --experimental-mounter-path="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162138 4636 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162150 4636 flags.go:64] FLAG: --fail-swap-on="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162162 4636 flags.go:64] FLAG: --feature-gates="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162176 4636 flags.go:64] FLAG: --file-check-frequency="20s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162189 4636 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162202 4636 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162215 4636 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162228 4636 flags.go:64] FLAG: --healthz-port="10248" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162240 4636 flags.go:64] FLAG: --help="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162252 4636 flags.go:64] FLAG: --hostname-override="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162264 4636 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162277 4636 flags.go:64] FLAG: --http-check-frequency="20s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162290 4636 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162302 4636 flags.go:64] FLAG: --image-credential-provider-config="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162314 4636 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162326 4636 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162337 4636 flags.go:64] FLAG: --image-service-endpoint="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162350 4636 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162363 4636 flags.go:64] FLAG: --kube-api-burst="100" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162375 4636 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162387 4636 flags.go:64] FLAG: --kube-api-qps="50" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162400 4636 flags.go:64] FLAG: --kube-reserved="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162413 
4636 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162424 4636 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162437 4636 flags.go:64] FLAG: --kubelet-cgroups="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162449 4636 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162461 4636 flags.go:64] FLAG: --lock-file="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162472 4636 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162485 4636 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162498 4636 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162518 4636 flags.go:64] FLAG: --log-json-split-stream="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162532 4636 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162545 4636 flags.go:64] FLAG: --log-text-split-stream="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162557 4636 flags.go:64] FLAG: --logging-format="text" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162567 4636 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162578 4636 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162588 4636 flags.go:64] FLAG: --manifest-url="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162598 4636 flags.go:64] FLAG: --manifest-url-header="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162651 4636 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162664 4636 flags.go:64] FLAG: --max-open-files="1000000" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162679 4636 flags.go:64] FLAG: --max-pods="110" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162691 4636 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162704 4636 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162720 4636 flags.go:64] FLAG: --memory-manager-policy="None" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162732 4636 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162744 4636 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162792 4636 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162805 4636 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162830 4636 flags.go:64] FLAG: --node-status-max-images="50" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162840 4636 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162852 4636 flags.go:64] FLAG: --oom-score-adj="-999" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162862 4636 flags.go:64] FLAG: --pod-cidr="" Oct 
02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162871 4636 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162885 4636 flags.go:64] FLAG: --pod-manifest-path="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162894 4636 flags.go:64] FLAG: --pod-max-pids="-1" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162904 4636 flags.go:64] FLAG: --pods-per-core="0" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162913 4636 flags.go:64] FLAG: --port="10250" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162923 4636 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162934 4636 flags.go:64] FLAG: --provider-id="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162946 4636 flags.go:64] FLAG: --qos-reserved="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162959 4636 flags.go:64] FLAG: --read-only-port="10255" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162971 4636 flags.go:64] FLAG: --register-node="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162982 4636 flags.go:64] FLAG: --register-schedulable="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.162994 4636 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163015 4636 flags.go:64] FLAG: --registry-burst="10" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163026 4636 flags.go:64] FLAG: --registry-qps="5" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163037 4636 flags.go:64] FLAG: --reserved-cpus="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163050 4636 flags.go:64] FLAG: --reserved-memory="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163061 4636 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163071 4636 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163080 4636 flags.go:64] FLAG: --rotate-certificates="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163090 4636 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163100 4636 flags.go:64] FLAG: --runonce="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163111 4636 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163124 4636 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163137 4636 flags.go:64] FLAG: --seccomp-default="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163149 4636 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163161 4636 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163174 4636 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163187 4636 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163199 4636 flags.go:64] FLAG: --storage-driver-password="root" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163211 4636 flags.go:64] 
FLAG: --storage-driver-secure="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163223 4636 flags.go:64] FLAG: --storage-driver-table="stats" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163236 4636 flags.go:64] FLAG: --storage-driver-user="root" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163248 4636 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163261 4636 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163273 4636 flags.go:64] FLAG: --system-cgroups="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163284 4636 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163306 4636 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163317 4636 flags.go:64] FLAG: --tls-cert-file="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163329 4636 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163344 4636 flags.go:64] FLAG: --tls-min-version="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163356 4636 flags.go:64] FLAG: --tls-private-key-file="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163367 4636 flags.go:64] FLAG: --topology-manager-policy="none" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163379 4636 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163392 4636 flags.go:64] FLAG: --topology-manager-scope="container" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163403 4636 flags.go:64] FLAG: --v="2" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163419 4636 flags.go:64] FLAG: --version="false" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163433 4636 flags.go:64] FLAG: --vmodule="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163447 4636 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.163536 4636 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163796 4636 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163813 4636 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163830 4636 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163842 4636 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163853 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163864 4636 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163875 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163885 4636 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163896 4636 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163905 4636 
feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163916 4636 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163927 4636 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163937 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163947 4636 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163958 4636 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163969 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163979 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163989 4636 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.163999 4636 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164009 4636 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164020 4636 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164030 4636 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164039 4636 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164047 4636 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164055 4636 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164063 4636 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164071 4636 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164079 4636 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164087 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164094 4636 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164103 4636 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164111 4636 feature_gate.go:330] unrecognized feature gate: Example Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164119 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164127 4636 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164135 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 
21:23:29.164143 4636 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164151 4636 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164160 4636 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164179 4636 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164192 4636 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164204 4636 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164215 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164226 4636 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164236 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164247 4636 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164257 4636 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164269 4636 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164279 4636 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164289 4636 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164303 4636 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164316 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164329 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164340 4636 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164352 4636 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164366 4636 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
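The flags.go:64 records above dump every effective kubelet flag as FLAG: --name="value", which makes the journal itself a usable source of the node's running configuration (node IP, taints, reserved resources, and so on). A sketch that recovers those values into a map and spot-checks a few; the input file name is an assumption, the record shape is exactly what appears above.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Shape of the records above: flags.go:64] FLAG: --name="value"
var flagRe = regexp.MustCompile(`FLAG: (--[\w-]+)="([^"]*)"`)

func main() {
	f, err := os.Open("kubelet.log") // assumed: a saved copy of this journal
	if err != nil {
		panic(err)
	}
	defer f.Close()
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024)
	flags := map[string]string{}
	for sc.Scan() {
		for _, m := range flagRe.FindAllStringSubmatch(sc.Text(), -1) {
			flags[m[1]] = m[2]
		}
	}
	// Spot-check values that matter on this node.
	for _, k := range []string{"--node-ip", "--register-with-taints", "--system-reserved"} {
		fmt.Printf("%s = %q\n", k, flags[k])
	}
}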
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164381 4636 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164391 4636 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164402 4636 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164412 4636 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164423 4636 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164432 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164440 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164449 4636 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164457 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164467 4636 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164477 4636 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164486 4636 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164497 4636 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164507 4636 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164516 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.164524 4636 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.164538 4636 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.174501 4636 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.174545 4636 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174629 4636 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174665 4636 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174672 4636 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 21:23:29 crc 
kubenswrapper[4636]: W1002 21:23:29.174678 4636 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174684 4636 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174691 4636 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174698 4636 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174716 4636 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174721 4636 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174726 4636 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174731 4636 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174736 4636 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174740 4636 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174745 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174762 4636 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174767 4636 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174771 4636 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174776 4636 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174780 4636 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174784 4636 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174789 4636 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174794 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174798 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174802 4636 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174807 4636 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174811 4636 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174815 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174820 4636 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174824 4636 
feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174831 4636 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174835 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174842 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174847 4636 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174851 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174855 4636 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174860 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174865 4636 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174870 4636 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174875 4636 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174879 4636 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174884 4636 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174889 4636 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174893 4636 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174898 4636 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174903 4636 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174907 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174911 4636 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174915 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174920 4636 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174924 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174928 4636 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174933 4636 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174937 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174942 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 
21:23:29.174947 4636 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174954 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174959 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174964 4636 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174969 4636 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174974 4636 feature_gate.go:330] unrecognized feature gate: Example Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174979 4636 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174985 4636 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.174990 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175001 4636 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175006 4636 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175012 4636 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175017 4636 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175023 4636 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175027 4636 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175032 4636 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175038 4636 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.175047 4636 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175235 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175243 4636 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175248 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175252 4636 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175257 4636 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175262 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175267 4636 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175271 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175276 4636 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175280 4636 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175284 4636 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175290 4636 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175295 4636 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175300 4636 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175306 4636 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
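The feature_gate.go:386 "feature gates: {map[...]}" records above are Go's %v rendering of a map[string]bool holding only the explicitly overridden gates (CloudDualStackNodeIPs, KMSv1, ValidatingAdmissionPolicy on; NodeSwap, UserNamespacesSupport and friends off). A self-contained sketch that parses such a record back into a map; the record string here is abridged from the lines above for brevity.

package main

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"
)

func main() {
	// Abridged from the feature_gate.go:386 records above.
	record := `feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false ValidatingAdmissionPolicy:true]}`

	m := regexp.MustCompile(`feature gates: \{map\[([^\]]*)\]\}`).FindStringSubmatch(record)
	if m == nil {
		panic("no feature-gate record found")
	}
	gates := map[string]bool{}
	for _, kv := range strings.Fields(m[1]) {
		name, val, ok := strings.Cut(kv, ":")
		if !ok {
			continue
		}
		on, err := strconv.ParseBool(val)
		if err != nil {
			continue
		}
		gates[name] = on
	}
	fmt.Println(gates)
}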
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175312 4636 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175317 4636 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175438 4636 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175443 4636 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175448 4636 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175466 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175471 4636 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175475 4636 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175480 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175486 4636 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175491 4636 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175496 4636 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175502 4636 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175507 4636 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175513 4636 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175518 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175524 4636 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175529 4636 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175534 4636 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175539 4636 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175544 4636 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175549 4636 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175554 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175559 4636 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175564 4636 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175569 4636 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 
21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175574 4636 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175581 4636 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175588 4636 feature_gate.go:330] unrecognized feature gate: Example Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175793 4636 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175798 4636 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175803 4636 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175809 4636 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175815 4636 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175820 4636 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175827 4636 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175832 4636 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175838 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175843 4636 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175848 4636 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175854 4636 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175859 4636 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175864 4636 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175869 4636 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175875 4636 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175881 4636 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175886 4636 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175891 4636 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175896 4636 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175902 4636 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175908 4636 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175913 4636 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175918 4636 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175922 4636 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175927 4636 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.175932 4636 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.175940 4636 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.176978 4636 server.go:940] "Client rotation is on, will bootstrap in background" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.181395 4636 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.181502 4636 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
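Client certificate rotation is on, and the certificate_manager records that follow give the schedule: the client cert expires 2026-02-24 05:52:08 UTC, the rotation deadline is 2025-12-01 13:37:35 UTC, and the kubelet waits 1432h14m6.66s. As we understand the kubelet certificate manager, that deadline is a jittered point inside the cert's validity window, and the logged wait is simply deadline minus the current time; the sketch below reproduces the arithmetic from the logged timestamps (copied by hand, so the sub-second part only approximately matches).

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the certificate_manager records in this log.
	now := time.Date(2025, 10, 2, 21, 23, 29, 186351000, time.UTC)      // when the record was logged
	deadline := time.Date(2025, 12, 1, 13, 37, 35, 847085291, time.UTC) // rotation deadline
	expiry := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)             // client cert expiration

	fmt.Println("wait until rotation:", deadline.Sub(now))    // ~1432h14m6.66s, as logged
	fmt.Println("slack before expiry:", expiry.Sub(deadline)) // rotation lands well before expiry
}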
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.184823 4636 server.go:997] "Starting client certificate rotation"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.184905 4636 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.186178 4636 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-01 13:37:35.847085291 +0000 UTC
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.186351 4636 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1432h14m6.66073994s for next certificate rotation
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.243791 4636 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.250479 4636 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.292383 4636 log.go:25] "Validated CRI v1 runtime API"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.372617 4636 log.go:25] "Validated CRI v1 image API"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.375657 4636 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.391607 4636 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-02-21-17-44-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.391665 4636 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.417580 4636 manager.go:217] Machine: {Timestamp:2025-10-02 21:23:29.411994712 +0000 UTC m=+0.735002791 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199476736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:3ae1382e-dfe6-49ba-a6ed-d50a2758b26a BootID:cf7a8497-d011-4aa1-ae8c-a105b6bba068 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599738368 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:29:94:62 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:29:94:62 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:6e:c3:07 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:af:40:7f Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:54:a5:c5 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:7f:ce:21 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:4e:e4:01:8e:e7:ec Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:f2:33:70:6d:2a:37 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199476736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.418002 4636 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.418237 4636 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.420856 4636 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.421374 4636 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.421455 4636 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.421854 4636 topology_manager.go:138] "Creating topology manager with none policy"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.421878 4636 container_manager_linux.go:303] "Creating device plugin manager"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.422658 4636 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.422704 4636 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.423091 4636 state_mem.go:36] "Initialized new in-memory state store"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.423322 4636 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.440923 4636 kubelet.go:418] "Attempting to sync node with API server"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.440984 4636 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.441034 4636 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.441057 4636 kubelet.go:324] "Adding apiserver pod source"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.441078 4636 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.446523 4636 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.454473 4636 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.467869 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.468030 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.468204 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.468356 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.526296 4636 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528372 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528419 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528434 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528449 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528472 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528487 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528502 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
pluginName="kubernetes.io/downward-api" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528541 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528556 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528576 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.528590 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.529568 4636 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.530311 4636 server.go:1280] "Started kubelet" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.531481 4636 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.531484 4636 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.532283 4636 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 02 21:23:29 crc systemd[1]: Started Kubernetes Kubelet. Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.534260 4636 server.go:460] "Adding debug handlers to kubelet server" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536434 4636 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536501 4636 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536556 4636 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 17:20:56.361414484 +0000 UTC Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536614 4636 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1627h57m26.824802207s for next certificate rotation Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536650 4636 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536657 4636 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.536668 4636 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.536735 4636 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.538541 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.538639 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError" Oct 02 21:23:29 
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.539340 4636 factory.go:55] Registering systemd factory
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.539389 4636 factory.go:221] Registration of the systemd container factory successfully
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.546229 4636 factory.go:153] Registering CRI-O factory
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.546379 4636 factory.go:221] Registration of the crio container factory successfully
Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.546375 4636 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.47:6443: connect: connection refused" interval="200ms"
Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.539104 4636 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.47:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186ac98c7497103a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-02 21:23:29.530261562 +0000 UTC m=+0.853269611,LastTimestamp:2025-10-02 21:23:29.530261562 +0000 UTC m=+0.853269611,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.546265 4636 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.546829 4636 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.547274 4636 factory.go:103] Registering Raw factory
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.547337 4636 manager.go:1196] Started watching for new ooms in manager
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.548558 4636 manager.go:319] Starting recovery of all containers
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.557916 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.557976 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558004 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558014 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558026 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558037 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558047 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558062 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558073 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558084 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558094 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558104 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558114 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558147 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558162 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558174 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558185 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558198 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558228 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558239 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558252 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558264 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558278 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558290 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558304 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558319 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558332 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558361 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558375 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558386 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558396 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558408 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558421 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558436 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558448 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558462 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558478 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558495 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558512 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558547 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558562 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558575 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558605 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558618 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558629 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558641 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558652 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558665 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558696 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558709 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558726 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558742 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558778 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558789 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558801 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558813 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558827 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558839 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558851 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558863 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558875 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558890 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558903 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558916 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558930 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558941 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558952 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558962 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558973 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558983 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.558993 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559004 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559017 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559030 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559042 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559052 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559062 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559073 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559085 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559096 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559108 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559118 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559133 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559143 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559153 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559165 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559175 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559186 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559199 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559209 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559222 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559234 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559245 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559256 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559269 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559280 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559292 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.559303 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562111 4636 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562138 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562150 4636 reconstruct.go:130] "Volume is 
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562150 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562159 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562170 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562180 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562201 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562213 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562223 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562235 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562249 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562261 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562273 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562295 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562306 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562319 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562330 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562341 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562351 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562361 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562371 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562382 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562391 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562401 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562410 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562419 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562428 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562437 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562447 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562455 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562465 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562475 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562484 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562497 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562508 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562518 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562527 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562537 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562546 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562556 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562566 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562577 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562587 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562597 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562606 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562615 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562625 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562634 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562644 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562653 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562663 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562672 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562683 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562693 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562704 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562713 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562723 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562732 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562746 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562781 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562793 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562803 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562814 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562823 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562835 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562845 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562858 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562868 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562879 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562890 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562899 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562910 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562920 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562929 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562939 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562951 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562961 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562972 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562981 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562990 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.562999 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563008 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563019 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563031 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563040 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563051 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563060 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563069 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563078 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563087 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563096 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563107 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563116 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563126 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563136 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563146 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563154 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563164 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563173 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563183 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563191 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563201 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563210 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563220 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563230 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563239 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563259 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563269 4636 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563277 4636 reconstruct.go:97] "Volume reconstruction finished" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.563285 4636 reconciler.go:26] "Reconciler: start to sync state" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.590985 4636 manager.go:324] Recovery completed Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.599854 4636 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.601630 4636 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.602390 4636 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.602414 4636 kubelet.go:2335] "Starting kubelet main sync loop" Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.602519 4636 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 02 21:23:29 crc kubenswrapper[4636]: W1002 21:23:29.603452 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.603529 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.605522 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.607078 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.607104 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.607113 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.608110 4636 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.608123 4636 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.608141 4636 state_mem.go:36] "Initialized new in-memory state store" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.626502 4636 policy_none.go:49] "None policy: Start" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.627444 4636 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.627466 4636 state_mem.go:35] "Initializing new in-memory state store" Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.636825 4636 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.690308 4636 manager.go:334] "Starting Device Plugin manager" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.690554 4636 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.690588 4636 server.go:79] "Starting device plugin registration server" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.691170 4636 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.691198 4636 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 
monitorPeriod="10s" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.691655 4636 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.691787 4636 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.691820 4636 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.701495 4636 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.702834 4636 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.702917 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.704058 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.704102 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.704115 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.704331 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.704685 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.704728 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.705232 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.705261 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.705274 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.705398 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.705999 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.706030 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.706554 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.706587 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.706599 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.707233 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.707273 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.707285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.707513 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.708512 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.708539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.708551 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.709199 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.709224 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.709237 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.709413 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.709966 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.710013 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.710974 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711022 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711541 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711576 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711593 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711722 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711738 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711783 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711801 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.711809 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.713362 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.713382 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.713391 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.713501 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.713512 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.713520 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.747365 4636 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.47:6443: connect: connection refused" interval="400ms" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765231 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765275 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765304 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765330 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765351 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765376 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765401 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765423 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765443 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765469 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765496 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765557 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765586 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765609 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.765630 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.791907 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.792941 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.792987 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.792999 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.793035 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.793480 4636 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.47:6443: connect: connection refused" node="crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867190 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867260 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867303 4636 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867334 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867366 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867395 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867432 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867461 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867510 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867555 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867553 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867836 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867598 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867894 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867907 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867916 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867875 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867941 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867591 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.867555 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868006 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868117 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868161 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868238 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868283 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868321 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868368 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868399 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868437 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.868521 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.994345 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.995422 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.995452 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:29 crc 
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.995461 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:29 crc kubenswrapper[4636]: I1002 21:23:29.995484 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 02 21:23:29 crc kubenswrapper[4636]: E1002 21:23:29.995938 4636 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.47:6443: connect: connection refused" node="crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.055732 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.074693 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.091491 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.100348 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.107365 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.111207 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-3c9cc2b1bcf6fa35f466753a855085ab1e6983ea3136fb77947d020f9713c442 WatchSource:0}: Error finding container 3c9cc2b1bcf6fa35f466753a855085ab1e6983ea3136fb77947d020f9713c442: Status 404 returned error can't find the container with id 3c9cc2b1bcf6fa35f466753a855085ab1e6983ea3136fb77947d020f9713c442
Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.113478 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-16ecb0ecef9e1598a28e004d897c2a670f6fa4e3a24e5dc4c0b56ae115d65b28 WatchSource:0}: Error finding container 16ecb0ecef9e1598a28e004d897c2a670f6fa4e3a24e5dc4c0b56ae115d65b28: Status 404 returned error can't find the container with id 16ecb0ecef9e1598a28e004d897c2a670f6fa4e3a24e5dc4c0b56ae115d65b28
Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.117676 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-f9069580160af0427910e9694b5a0abc6aaacce01182d02cc5eedeeca1d013dc WatchSource:0}: Error finding container f9069580160af0427910e9694b5a0abc6aaacce01182d02cc5eedeeca1d013dc: Status 404 returned error can't find the container with id f9069580160af0427910e9694b5a0abc6aaacce01182d02cc5eedeeca1d013dc
Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.125334 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-6456b4ef5d5d970bdc5f479d2fd5a73e7cc94e42173487c5826f0bca0552b2af WatchSource:0}: Error finding container 6456b4ef5d5d970bdc5f479d2fd5a73e7cc94e42173487c5826f0bca0552b2af: Status 404 returned error can't find the container with id 6456b4ef5d5d970bdc5f479d2fd5a73e7cc94e42173487c5826f0bca0552b2af
Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.131874 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c32eeab6205b75dea5b310058453853943e9a4a4eb26e07c9b1bd90a3c55272a WatchSource:0}: Error finding container c32eeab6205b75dea5b310058453853943e9a4a4eb26e07c9b1bd90a3c55272a: Status 404 returned error can't find the container with id c32eeab6205b75dea5b310058453853943e9a4a4eb26e07c9b1bd90a3c55272a
Oct 02 21:23:30 crc kubenswrapper[4636]: E1002 21:23:30.148043 4636 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.47:6443: connect: connection refused" interval="800ms"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.396836 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.398042 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.398091 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.398102 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.398130 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: E1002 21:23:30.398557 4636 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.47:6443: connect: connection refused" node="crc"
Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.543924 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:30 crc kubenswrapper[4636]: E1002 21:23:30.544064 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.548015 4636 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.609647 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c32eeab6205b75dea5b310058453853943e9a4a4eb26e07c9b1bd90a3c55272a"}
Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.613846 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6456b4ef5d5d970bdc5f479d2fd5a73e7cc94e42173487c5826f0bca0552b2af"}
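Every request above to https://api-int.crc.testing:6443 fails with the same "dial tcp 38.102.83.47:6443: connect: connection refused", because the API server the kubelet is trying to reach is one of the static pods it is itself still starting. That error string comes straight from Go's net package; a minimal sketch that reproduces it (assuming nothing is listening on the local port used here):

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    func main() {
        // With no listener on the port, DialTimeout returns an error like:
        //   dial tcp 127.0.0.1:6443: connect: connection refused
        _, err := net.DialTimeout("tcp", "127.0.0.1:6443", 2*time.Second)
        if err != nil {
            fmt.Println(err)
        }
    }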
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6456b4ef5d5d970bdc5f479d2fd5a73e7cc94e42173487c5826f0bca0552b2af"} Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.615997 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f9069580160af0427910e9694b5a0abc6aaacce01182d02cc5eedeeca1d013dc"} Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.616607 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused Oct 02 21:23:30 crc kubenswrapper[4636]: E1002 21:23:30.616693 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError" Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.617532 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"16ecb0ecef9e1598a28e004d897c2a670f6fa4e3a24e5dc4c0b56ae115d65b28"} Oct 02 21:23:30 crc kubenswrapper[4636]: I1002 21:23:30.618736 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3c9cc2b1bcf6fa35f466753a855085ab1e6983ea3136fb77947d020f9713c442"} Oct 02 21:23:30 crc kubenswrapper[4636]: W1002 21:23:30.720713 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused Oct 02 21:23:30 crc kubenswrapper[4636]: E1002 21:23:30.720803 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError" Oct 02 21:23:30 crc kubenswrapper[4636]: E1002 21:23:30.948892 4636 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.47:6443: connect: connection refused" interval="1.6s" Oct 02 21:23:31 crc kubenswrapper[4636]: E1002 21:23:31.061852 4636 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.47:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186ac98c7497103a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting 
kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-02 21:23:29.530261562 +0000 UTC m=+0.853269611,LastTimestamp:2025-10-02 21:23:29.530261562 +0000 UTC m=+0.853269611,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 02 21:23:31 crc kubenswrapper[4636]: W1002 21:23:31.119261 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused Oct 02 21:23:31 crc kubenswrapper[4636]: E1002 21:23:31.119421 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.199035 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.201098 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.201134 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.201144 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.201170 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 21:23:31 crc kubenswrapper[4636]: E1002 21:23:31.201498 4636 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.47:6443: connect: connection refused" node="crc" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.548078 4636 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.622637 4636 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4" exitCode=0 Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.622784 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.622807 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4"} Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.623443 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.623463 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:31 crc 
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.623472 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.624234 4636 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c9f4cba62e914322fc2b15b0bffaa03cda012be340add72b99195047ba01f133" exitCode=0
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.624302 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.624305 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c9f4cba62e914322fc2b15b0bffaa03cda012be340add72b99195047ba01f133"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.625554 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.625592 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.625610 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.627697 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.627769 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.627780 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.627789 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.627854 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.628502 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.628525 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.628533 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.630036 4636 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87" exitCode=0
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.630120 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.630198 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.631602 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.631626 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.631635 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.633640 4636 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b" exitCode=0
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.633714 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b"}
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.633792 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.636342 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.636395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.636413 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.639572 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.640262 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.640285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:31 crc kubenswrapper[4636]: I1002 21:23:31.640297 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:32 crc kubenswrapper[4636]: W1002 21:23:32.491098 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:32 crc kubenswrapper[4636]: E1002 21:23:32.491212 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.547706 4636 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:32 crc kubenswrapper[4636]: E1002 21:23:32.550119 4636 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.47:6443: connect: connection refused" interval="3.2s"
Oct 02 21:23:32 crc kubenswrapper[4636]: W1002 21:23:32.601962 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:32 crc kubenswrapper[4636]: E1002 21:23:32.602096 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.638309 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.638360 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.638372 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.638460 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.639562 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.639601 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.639615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.641012 4636 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7e309717264f5e4296be905e0793767fcd8dd49ecb95df12baf4f2969499e388" exitCode=0
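Note the lease retry interval in the "Failed to ensure lease exists" entries: 800ms, then 1.6s, now 3.2s (and 6.4s later in this log), a doubling sequence consistent with exponential backoff. A minimal Go sketch of that retry pattern (illustrative only, not the kubelet's actual lease controller):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        interval := 800 * time.Millisecond
        const maxInterval = 7 * time.Second
        for attempt := 1; attempt <= 4; attempt++ {
            // In real code: attempt the API request here and return on success.
            fmt.Printf("attempt %d failed, will retry, interval=%s\n", attempt, interval)
            time.Sleep(interval)
            if next := interval * 2; next <= maxInterval {
                interval = next // 800ms -> 1.6s -> 3.2s -> 6.4s
            }
        }
    }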
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.641079 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7e309717264f5e4296be905e0793767fcd8dd49ecb95df12baf4f2969499e388"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.641137 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.642008 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.642054 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.642068 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.645938 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.645971 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.645981 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.648272 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.648273 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.648267 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4e5aecba03015137e66b951dc3c3a23f42866c1c501211aca641fd30d28e594d"}
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.649628 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.649659 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.649670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.650190 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.650215 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.650226 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.802640 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.812355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.812395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.812407 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:32 crc kubenswrapper[4636]: I1002 21:23:32.812440 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 02 21:23:32 crc kubenswrapper[4636]: E1002 21:23:32.813042 4636 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.47:6443: connect: connection refused" node="crc"
Oct 02 21:23:33 crc kubenswrapper[4636]: W1002 21:23:33.346377 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:33 crc kubenswrapper[4636]: E1002 21:23:33.346466 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:33 crc kubenswrapper[4636]: W1002 21:23:33.416718 4636 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.47:6443: connect: connection refused
Oct 02 21:23:33 crc kubenswrapper[4636]: E1002 21:23:33.416853 4636 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.47:6443: connect: connection refused" logger="UnhandledError"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.654240 4636 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="23c5e28df3338c3d44dbcd1af2ea0b2b4b9e5355ef746af0bf9e51610d0ca833" exitCode=0
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.654409 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"23c5e28df3338c3d44dbcd1af2ea0b2b4b9e5355ef746af0bf9e51610d0ca833"}
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.654625 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.656536 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.656595 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.656614 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.660935 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b"}
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.661007 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c"}
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.661027 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.661074 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.661199 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.661074 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662853 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662883 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662856 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662909 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662953 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662928 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662982 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.662934 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.663109 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.853404 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.853594 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.855062 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.855096 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:33 crc kubenswrapper[4636]: I1002 21:23:33.855110 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.558894 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.567939 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670461 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"414fa774f74b570833b95ee6b801bf008de3982508b0dfbbb8a3f0709ad70f5f"} Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670537 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"12effe2aa8523df903c713d3d593f27644ba2bdce0345484034a122fef252bbd"} Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670557 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670568 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9b0b0e0c5ceef3c7c13fc4a121b1fb493441bf8c68d853d15dcc960af616f4d7"} Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670626 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670644 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.670714 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.672740 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.672797 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.672812 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.673492 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.673554 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.673579 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.673640 4636 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.673697 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:34 crc kubenswrapper[4636]: I1002 21:23:34.673716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.680021 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"af7e9f863ede0560b0fbf73b6396981cd2ebc28be367bad8e3e354ca19c5403d"} Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.680103 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.680115 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f63acfbffffe3d0d63ed2a88629d2115c53795e9a3ddcea1c1fc11cf0a92cb2a"} Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.680272 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.680351 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.681455 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.681499 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.681516 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.682127 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.682233 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:35 crc kubenswrapper[4636]: I1002 21:23:35.682260 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.013716 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.016308 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.016393 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.016413 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.016466 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.022644 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.022950 4636 
prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.023028 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.024558 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.024606 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.024626 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.150590 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.682523 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.682599 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.683727 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.684010 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.684046 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.684067 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.684911 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.684954 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.684972 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.847843 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.848093 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.849858 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.849913 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:36 crc kubenswrapper[4636]: I1002 21:23:36.849931 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.308500 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:37 crc kubenswrapper[4636]: 
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.385206 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.573316 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.686011 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.690879 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.691909 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.691975 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.692000 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.692570 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.692714 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:37 crc kubenswrapper[4636]: I1002 21:23:37.692746 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:38 crc kubenswrapper[4636]: I1002 21:23:38.692160 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:38 crc kubenswrapper[4636]: I1002 21:23:38.694125 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:38 crc kubenswrapper[4636]: I1002 21:23:38.694226 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:38 crc kubenswrapper[4636]: I1002 21:23:38.694247 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:39 crc kubenswrapper[4636]: I1002 21:23:39.694416 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 02 21:23:39 crc kubenswrapper[4636]: I1002 21:23:39.694742 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:39 crc kubenswrapper[4636]: I1002 21:23:39.696213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:39 crc kubenswrapper[4636]: I1002 21:23:39.696265 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:39 crc kubenswrapper[4636]: I1002 21:23:39.696286 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:39 crc kubenswrapper[4636]: E1002 21:23:39.701704 4636 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 02 21:23:42 crc kubenswrapper[4636]: I1002 21:23:42.694522 4636 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 02 21:23:42 crc kubenswrapper[4636]: I1002 21:23:42.694930 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.485151 4636 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44958->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.485238 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44958->192.168.126.11:17697: read: connection reset by peer"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.548954 4636 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.708990 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.713207 4636 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b" exitCode=255
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.713397 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b"}
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.713903 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.715392 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.715589 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.715784 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.716811 4636 scope.go:117] "RemoveContainer" containerID="351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.950836 4636 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.950947 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.958233 4636 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 02 21:23:43 crc kubenswrapper[4636]: I1002 21:23:43.958325 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 02 21:23:44 crc kubenswrapper[4636]: I1002 21:23:44.718612 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 02 21:23:44 crc kubenswrapper[4636]: I1002 21:23:44.721931 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b"}
Oct 02 21:23:44 crc kubenswrapper[4636]: I1002 21:23:44.722139 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:44 crc kubenswrapper[4636]: I1002 21:23:44.723678 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:44 crc kubenswrapper[4636]: I1002 21:23:44.723732 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:23:44 crc kubenswrapper[4636]: I1002 21:23:44.723797 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.159719 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.160041 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.160196 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.161670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.161714 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
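The probe failures above come in two flavors: a client-side timeout ("Client.Timeout exceeded while awaiting headers") while the endpoint is still coming up, and a non-2xx response (the 403 from an unauthenticated /livez request). A minimal Go sketch of an HTTP probe that surfaces both failure modes the same way (URL, port, and timeout are illustrative, not the kubelet's configuration):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func probe(url string) error {
        client := &http.Client{Timeout: 1 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            // e.g. "... (Client.Timeout exceeded while awaiting headers)"
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("HTTP probe failed with statuscode: %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probe("http://127.0.0.1:10357/healthz"); err != nil {
            fmt.Println("Probe failed:", err)
        }
    }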
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.161732 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.169830 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.728313 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.730151 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.730223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.730247 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.853296 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.853500 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.854824 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.854863 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:46 crc kubenswrapper[4636]: I1002 21:23:46.854875 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.616273 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.616552 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.618294 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.618364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.618390 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.633731 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.730914 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.731017 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.732454 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 
21:23:47.732505 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.732460 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.732523 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.732558 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:47 crc kubenswrapper[4636]: I1002 21:23:47.732735 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:48 crc kubenswrapper[4636]: E1002 21:23:48.931895 4636 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.939562 4636 trace.go:236] Trace[118863673]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Oct-2025 21:23:36.103) (total time: 12835ms): Oct 02 21:23:48 crc kubenswrapper[4636]: Trace[118863673]: ---"Objects listed" error: 12835ms (21:23:48.939) Oct 02 21:23:48 crc kubenswrapper[4636]: Trace[118863673]: [12.835481414s] [12.835481414s] END Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.939617 4636 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.941285 4636 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.942812 4636 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.943183 4636 trace.go:236] Trace[1191948375]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Oct-2025 21:23:36.088) (total time: 12854ms): Oct 02 21:23:48 crc kubenswrapper[4636]: Trace[1191948375]: ---"Objects listed" error: 12853ms (21:23:48.942) Oct 02 21:23:48 crc kubenswrapper[4636]: Trace[1191948375]: [12.854136505s] [12.854136505s] END Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.943233 4636 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 02 21:23:48 crc kubenswrapper[4636]: E1002 21:23:48.945554 4636 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 02 21:23:48 crc kubenswrapper[4636]: I1002 21:23:48.945745 4636 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.452986 4636 apiserver.go:52] "Watching apiserver" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.732067 4636 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.732383 4636 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.732994 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.733056 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.733121 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.733399 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.733427 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.733590 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.733622 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.734438 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.734780 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.737690 4636 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.740783 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.740868 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.740963 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.741048 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.741064 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.741232 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.744052 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.744727 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.750266 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.772279 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.775980 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.795405 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798716 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798782 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798810 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798837 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798860 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798883 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 
21:23:49.798904 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798929 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798975 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.798996 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799017 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799063 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799084 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799106 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799128 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799129 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799151 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799181 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799206 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799227 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799258 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799281 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799302 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799322 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799342 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799360 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 02 
21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799355 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799379 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799399 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799423 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799443 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799471 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799494 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799517 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799538 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799560 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: 
\"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799583 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799628 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799651 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799673 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799694 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799717 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799741 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799781 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799801 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799823 4636 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799849 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799871 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799893 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799940 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799963 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799984 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800005 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800026 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800048 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800068 4636 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800179 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800202 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800223 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800244 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800265 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800286 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800305 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800326 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800344 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800364 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800384 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800405 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800427 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800445 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800464 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800483 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800508 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800527 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800562 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800581 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800603 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800622 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800639 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800656 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800671 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800689 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800707 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800725 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800783 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800806 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800827 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800849 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800870 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800889 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800908 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800927 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800948 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800968 4636 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800986 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801008 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801028 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801047 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801064 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801086 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801106 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801125 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801145 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801163 4636 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801183 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801200 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801222 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801239 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801258 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801274 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801292 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801310 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801327 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc 
kubenswrapper[4636]: I1002 21:23:49.801346 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801363 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801380 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801396 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801413 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801430 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801447 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801468 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801487 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801508 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801527 4636 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801563 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801583 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801601 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801627 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801646 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801673 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799373 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799435 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.799658 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800112 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800148 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800298 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800326 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800423 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800518 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800563 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800595 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800729 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800817 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800922 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800988 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.800997 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.818110 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801121 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801148 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801179 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801199 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801282 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801415 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801450 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801550 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801636 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.801703 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:23:50.30167634 +0000 UTC m=+21.624684359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801732 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801917 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.801994 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802077 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802153 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802249 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802262 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802355 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802432 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802580 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802638 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.802707 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.803038 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.816018 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.816686 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.816962 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.818414 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.818498 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.819373 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.819599 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.819806 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.819980 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820122 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820199 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820277 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820344 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820410 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820485 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod 
\"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820552 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820623 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820689 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820769 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820850 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820921 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820992 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821057 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821122 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821198 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821263 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821330 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821401 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821466 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821536 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821619 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821711 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821818 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821925 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822001 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822093 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822168 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822242 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822308 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822408 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822489 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822576 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822674 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822773 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822851 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822924 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822992 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823061 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823142 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823209 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823279 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823351 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823415 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823478 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823545 4636 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823617 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823681 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.823954 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.824026 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.824110 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825322 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825403 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825478 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 
21:23:49.825617 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825693 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825822 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825901 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827075 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827123 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827165 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827192 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827219 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827320 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: 
\"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827358 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827385 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827409 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827430 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827483 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827509 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827534 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827556 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 
21:23:49.827576 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827598 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827637 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827663 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827684 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827814 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827833 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827845 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827859 4636 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827869 4636 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827881 4636 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827895 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827906 4636 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827940 4636 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827952 4636 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827963 4636 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827974 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827987 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828001 4636 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828013 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828023 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820629 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820669 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820689 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.820974 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.821677 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822134 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.822261 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.824016 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.825815 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.826031 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.826129 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.826456 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.826537 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.826661 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827063 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827082 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827193 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827321 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827530 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.827640 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828491 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828532 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828541 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828846 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828907 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.828931 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.829065 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.829113 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.829608 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.829672 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.829919 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.830055 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.830115 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.830368 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.830534 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.831193 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.833885 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.834155 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.834380 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.834618 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.835178 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.835477 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.835814 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.837787 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.839030 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.839251 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.839400 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.840517 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.841105 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.841440 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.842787 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.842998 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.843248 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.843970 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.844450 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.847076 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.847304 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.847317 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.857049 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.857453 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.857403 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.857803 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.857851 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.858135 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.858284 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.858353 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.858458 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.858514 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.858906 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.859040 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.859137 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.859992 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.860100 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.860485 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861021 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861010 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861153 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861361 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861419 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861524 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861674 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861920 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861953 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.861960 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.862334 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.862466 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.862401 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.862546 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.862895 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.864172 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.864306 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.864867 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.865518 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.866480 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.866564 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.866697 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.867264 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.867307 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.867815 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.867914 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.868133 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.868376 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.868617 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.868838 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.868884 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.869225 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.869384 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.869474 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.871113 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.871254 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.873287 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.873437 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.873824 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.873845 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.874054 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.874202 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.874367 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:50.374343343 +0000 UTC m=+21.697351362 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.881590 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.881824 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.881997 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.882068 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.882198 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.882984 4636 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.883208 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.883787 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.884035 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.884355 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.886929 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.887042 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.887216 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.887647 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.887775 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.888390 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.889185 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.887413 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.891226 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.891298 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.893008 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.893076 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.893834 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.894188 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.894909 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.895549 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.895953 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.896438 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.896527 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:50.396505214 +0000 UTC m=+21.719513223 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.896556 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.905942 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.906727 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.913075 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.913107 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.919264 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.919712 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.919996 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.920005 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.920023 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.926362 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932137 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932223 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932278 4636 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932270 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932339 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932292 4636 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932392 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932401 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932412 4636 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932421 4636 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932429 4636 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath 
\"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932438 4636 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932446 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932455 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932464 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932473 4636 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932482 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932491 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932499 4636 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932508 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932516 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932524 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932533 4636 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932543 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" 
DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932554 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932565 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932575 4636 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932586 4636 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932597 4636 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932609 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932617 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932625 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932634 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932642 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932651 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932659 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932667 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 
21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932677 4636 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932687 4636 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932698 4636 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932709 4636 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932721 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932730 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932740 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932765 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932774 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932783 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932806 4636 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932815 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932824 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 
crc kubenswrapper[4636]: I1002 21:23:49.932833 4636 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932843 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.932852 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933219 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933240 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933252 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933264 4636 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933273 4636 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933306 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933317 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933325 4636 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933333 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933341 4636 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc 
kubenswrapper[4636]: I1002 21:23:49.933349 4636 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933376 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933386 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933394 4636 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933402 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933410 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933419 4636 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933427 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933436 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933464 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933474 4636 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933484 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933493 4636 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 
21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933502 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933510 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933535 4636 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933546 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933555 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933563 4636 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933571 4636 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933579 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.933638 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934002 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934014 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934040 4636 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934053 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc 
kubenswrapper[4636]: I1002 21:23:49.934065 4636 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934077 4636 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934089 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934100 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934110 4636 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934121 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934132 4636 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934140 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934150 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934158 4636 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934167 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934177 4636 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934186 4636 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934194 
4636 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934202 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934210 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934219 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934229 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934239 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934247 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934255 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934264 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934273 4636 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934282 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934290 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934298 4636 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934306 4636 
reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934314 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934323 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934331 4636 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934340 4636 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934348 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934357 4636 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934365 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934374 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934381 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934390 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934398 4636 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934406 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934415 4636 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934423 4636 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934431 4636 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934440 4636 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934448 4636 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934456 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934465 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934474 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934482 4636 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934490 4636 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934497 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934505 4636 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934513 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934521 4636 
reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934530 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934538 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934545 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934554 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934562 4636 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934570 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934577 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934586 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934595 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934603 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934610 4636 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934619 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934627 4636 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934635 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934644 4636 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934653 4636 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934665 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934679 4636 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934689 4636 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934698 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934706 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934716 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934726 4636 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934736 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934807 4636 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934822 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934833 4636 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934844 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934853 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934861 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934871 4636 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934880 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934889 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934897 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934905 4636 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934913 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.934921 4636 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.935144 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.935301 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.935330 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.935347 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.935410 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:50.435390237 +0000 UTC m=+21.758398256 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.937948 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.937978 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.937993 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:49 crc kubenswrapper[4636]: E1002 21:23:49.938054 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:50.43803481 +0000 UTC m=+21.761042829 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.940994 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.972161 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.980135 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.981910 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:49 crc kubenswrapper[4636]: I1002 21:23:49.983484 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.008942 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.030908 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.035808 4636 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.035832 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.035842 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.050224 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.051209 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.059543 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 21:23:50 crc kubenswrapper[4636]: W1002 21:23:50.063879 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-0195dbc85cc90304de1bbd217ea539baf5be52424504704822cd7c2bc8a195b0 WatchSource:0}: Error finding container 0195dbc85cc90304de1bbd217ea539baf5be52424504704822cd7c2bc8a195b0: Status 404 returned error can't find the container with id 0195dbc85cc90304de1bbd217ea539baf5be52424504704822cd7c2bc8a195b0 Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.070484 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.088307 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.128063 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-586cm"] Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.128414 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-586cm" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.132797 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.133138 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.133276 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.138964 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.188357 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.218356 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.230788 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.242646 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmxxf\" (UniqueName: \"kubernetes.io/projected/520aa252-e0e0-47e8-bb4c-55579fcfd286-kube-api-access-cmxxf\") pod \"node-resolver-586cm\" (UID: \"520aa252-e0e0-47e8-bb4c-55579fcfd286\") " pod="openshift-dns/node-resolver-586cm" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.242699 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/520aa252-e0e0-47e8-bb4c-55579fcfd286-hosts-file\") pod \"node-resolver-586cm\" (UID: \"520aa252-e0e0-47e8-bb4c-55579fcfd286\") " pod="openshift-dns/node-resolver-586cm" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.244054 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.266135 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.284056 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.299072 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.310450 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.319705 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 
02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.343759 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.343847 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmxxf\" (UniqueName: \"kubernetes.io/projected/520aa252-e0e0-47e8-bb4c-55579fcfd286-kube-api-access-cmxxf\") pod \"node-resolver-586cm\" (UID: \"520aa252-e0e0-47e8-bb4c-55579fcfd286\") " pod="openshift-dns/node-resolver-586cm"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.343872 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/520aa252-e0e0-47e8-bb4c-55579fcfd286-hosts-file\") pod \"node-resolver-586cm\" (UID: \"520aa252-e0e0-47e8-bb4c-55579fcfd286\") " pod="openshift-dns/node-resolver-586cm"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.343958 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/520aa252-e0e0-47e8-bb4c-55579fcfd286-hosts-file\") pod \"node-resolver-586cm\" (UID: \"520aa252-e0e0-47e8-bb4c-55579fcfd286\") " pod="openshift-dns/node-resolver-586cm"
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.344036 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:23:51.344016784 +0000 UTC m=+22.667024803 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.362917 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmxxf\" (UniqueName: \"kubernetes.io/projected/520aa252-e0e0-47e8-bb4c-55579fcfd286-kube-api-access-cmxxf\") pod \"node-resolver-586cm\" (UID: \"520aa252-e0e0-47e8-bb4c-55579fcfd286\") " pod="openshift-dns/node-resolver-586cm"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.444320 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.444368 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.444401 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.444435 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444529 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444531 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444567 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444578 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444596 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:51.444572707 +0000 UTC m=+22.767580726 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444639 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:51.444618058 +0000 UTC m=+22.767626077 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444702 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444715 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444732 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444767 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444734 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:51.444727751 +0000 UTC m=+22.767735760 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.444816 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:51.444807233 +0000 UTC m=+22.767815352 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.468456 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-586cm"
Oct 02 21:23:50 crc kubenswrapper[4636]: W1002 21:23:50.493221 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod520aa252_e0e0_47e8_bb4c_55579fcfd286.slice/crio-0aaa4bd157231cfec4a368be12c848d2a47e0dacab96996e72b356c9c4a6c3a7 WatchSource:0}: Error finding container 0aaa4bd157231cfec4a368be12c848d2a47e0dacab96996e72b356c9c4a6c3a7: Status 404 returned error can't find the container with id 0aaa4bd157231cfec4a368be12c848d2a47e0dacab96996e72b356c9c4a6c3a7
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.743397 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.743455 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.743467 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e55889d2134490b1f716fc4083b2f3697099d68e3475f8f540b77b73bff487bc"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.745556 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.745583 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0195dbc85cc90304de1bbd217ea539baf5be52424504704822cd7c2bc8a195b0"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.747355 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.747820 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.749607 4636 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b" exitCode=255
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.749675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.749739 4636 scope.go:117] "RemoveContainer" containerID="351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.751303 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-586cm" event={"ID":"520aa252-e0e0-47e8-bb4c-55579fcfd286","Type":"ContainerStarted","Data":"6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.751329 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-586cm" event={"ID":"520aa252-e0e0-47e8-bb4c-55579fcfd286","Type":"ContainerStarted","Data":"0aaa4bd157231cfec4a368be12c848d2a47e0dacab96996e72b356c9c4a6c3a7"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.752629 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9fd4cd738f1ccf0e13359962ba0cc5cbe12ab5d432961e6a28018c16628e5da8"}
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.764972 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.784993 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.797530 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.801726 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.808720 4636 scope.go:117] "RemoveContainer" containerID="b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b" Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.809152 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.820835 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.832185 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.841719 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 
02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.856219 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.866687 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.877946 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.888929 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.901298 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.910792 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.921244 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:43Z\\\",\\\"message\\\":\\\"W1002 21:23:33.045885 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1002 
21:23:33.046240 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759440213 cert, and key in /tmp/serving-cert-2664177270/serving-signer.crt, /tmp/serving-cert-2664177270/serving-signer.key\\\\nI1002 21:23:33.273101 1 observer_polling.go:159] Starting file observer\\\\nW1002 21:23:33.276952 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1002 21:23:33.279312 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 21:23:33.280498 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2664177270/tls.crt::/tmp/serving-cert-2664177270/tls.key\\\\\\\"\\\\nF1002 21:23:43.479848 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.932473 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.938431 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2l2mr"]
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.938813 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.942192 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.943481 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-9qm8w"]
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.944156 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.944592 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-895mm"]
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.944871 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.944915 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-895mm"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.949362 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.949541 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Oct 02 21:23:50 crc kubenswrapper[4636]: W1002 21:23:50.949771 4636 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.949861 4636 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.949916 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Oct 02 21:23:50 crc kubenswrapper[4636]: W1002 21:23:50.950009 4636 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Oct 02 21:23:50 crc kubenswrapper[4636]: E1002 21:23:50.950071 4636 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.950093 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.950333 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.950514 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.950159 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.951734 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.963970 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.980666 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:50 crc kubenswrapper[4636]: I1002 21:23:50.990624 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.007238 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.030946 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.041013 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.048861 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7a136ab0-a86b-4cf4-a332-8c569e1ca777-rootfs\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.048899 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-cni-multus\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.048921 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cni-binary-copy\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.048937 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-system-cni-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.048979 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-cnibin\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049004 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-os-release\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049026 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-kubelet\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049041 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7a136ab0-a86b-4cf4-a332-8c569e1ca777-proxy-tls\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049055 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-k8s-cni-cncf-io\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049069 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-cni-bin\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049082 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-hostroot\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049097 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-daemon-config\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049110 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhh2l\" (UniqueName: \"kubernetes.io/projected/3a64b152-90d7-4dd0-be73-17e987476a1c-kube-api-access-bhh2l\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049126 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7a136ab0-a86b-4cf4-a332-8c569e1ca777-mcd-auth-proxy-config\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049143 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cnibin\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049163 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxkc6\" (UniqueName: \"kubernetes.io/projected/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-kube-api-access-rxkc6\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049180 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-socket-dir-parent\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049194 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-cni-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049208 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-conf-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049224 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-etc-kubernetes\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049265 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-netns\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049278 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-os-release\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049295 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-cni-binary-copy\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049318 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049582 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049596 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-multus-certs\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049610 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs22p\" (UniqueName: \"kubernetes.io/projected/7a136ab0-a86b-4cf4-a332-8c569e1ca777-kube-api-access-rs22p\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.049625 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-system-cni-dir\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.057075 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.069288 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.080715 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.095099 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.106229 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.116270 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.130429 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150489 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-os-release\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150847 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-kubelet\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150872 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-k8s-cni-cncf-io\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150952 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-cni-bin\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151022 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-hostroot\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151040 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-daemon-config\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150933 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-k8s-cni-cncf-io\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150913 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-kubelet\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151109 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-hostroot\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150990 4636 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-cni-bin\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151055 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhh2l\" (UniqueName: \"kubernetes.io/projected/3a64b152-90d7-4dd0-be73-17e987476a1c-kube-api-access-bhh2l\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151214 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7a136ab0-a86b-4cf4-a332-8c569e1ca777-proxy-tls\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151233 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7a136ab0-a86b-4cf4-a332-8c569e1ca777-mcd-auth-proxy-config\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151928 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cnibin\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151949 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxkc6\" (UniqueName: \"kubernetes.io/projected/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-kube-api-access-rxkc6\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151968 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-socket-dir-parent\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151983 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-conf-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151997 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-etc-kubernetes\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152012 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-cni-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152035 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-netns\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152083 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-os-release\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152099 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-cni-binary-copy\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152125 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152142 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152157 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-multus-certs\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152172 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-system-cni-dir\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152187 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs22p\" (UniqueName: \"kubernetes.io/projected/7a136ab0-a86b-4cf4-a332-8c569e1ca777-kube-api-access-rs22p\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152203 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/7a136ab0-a86b-4cf4-a332-8c569e1ca777-rootfs\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152218 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-cni-multus\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152235 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cni-binary-copy\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152249 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-system-cni-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152263 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-cnibin\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152308 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-cnibin\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.150810 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-os-release\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152343 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cnibin\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.151888 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7a136ab0-a86b-4cf4-a332-8c569e1ca777-mcd-auth-proxy-config\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152639 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-socket-dir-parent\") pod \"multus-895mm\" (UID: 
\"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.152664 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-conf-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156319 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-multus-certs\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156400 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-etc-kubernetes\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156483 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-cni-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156517 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-run-netns\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156564 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-os-release\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156676 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7a136ab0-a86b-4cf4-a332-8c569e1ca777-proxy-tls\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156855 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-host-var-lib-cni-multus\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.156908 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-system-cni-dir\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.157214 4636 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7a136ab0-a86b-4cf4-a332-8c569e1ca777-rootfs\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.157293 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-cni-binary-copy\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.157314 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3a64b152-90d7-4dd0-be73-17e987476a1c-system-cni-dir\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.157908 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cni-binary-copy\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.158265 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.163989 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:43Z\\\",\\\"message\\\":\\\"W1002 21:23:33.045885 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1002 
21:23:33.046240 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759440213 cert, and key in /tmp/serving-cert-2664177270/serving-signer.crt, /tmp/serving-cert-2664177270/serving-signer.key\\\\nI1002 21:23:33.273101 1 observer_polling.go:159] Starting file observer\\\\nW1002 21:23:33.276952 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1002 21:23:33.279312 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 21:23:33.280498 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2664177270/tls.crt::/tmp/serving-cert-2664177270/tls.key\\\\\\\"\\\\nF1002 21:23:43.479848 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for 
shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.176245 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.178031 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxkc6\" (UniqueName: \"kubernetes.io/projected/4c1f289f-fb2b-4fd8-a6a3-2c573fff134a-kube-api-access-rxkc6\") pod \"multus-additional-cni-plugins-9qm8w\" (UID: \"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\") " 
pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.194408 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs22p\" (UniqueName: \"kubernetes.io/projected/7a136ab0-a86b-4cf4-a332-8c569e1ca777-kube-api-access-rs22p\") pod \"machine-config-daemon-2l2mr\" (UID: \"7a136ab0-a86b-4cf4-a332-8c569e1ca777\") " pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.194763 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.199280 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhh2l\" (UniqueName: \"kubernetes.io/projected/3a64b152-90d7-4dd0-be73-17e987476a1c-kube-api-access-bhh2l\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.249993 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.259913 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" Oct 02 21:23:51 crc kubenswrapper[4636]: W1002 21:23:51.262743 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a136ab0_a86b_4cf4_a332_8c569e1ca777.slice/crio-6e68fc2ead266ec5c5d1714bb9017d63b978451ff9ac22d1d42f3a7ee4c23c79 WatchSource:0}: Error finding container 6e68fc2ead266ec5c5d1714bb9017d63b978451ff9ac22d1d42f3a7ee4c23c79: Status 404 returned error can't find the container with id 6e68fc2ead266ec5c5d1714bb9017d63b978451ff9ac22d1d42f3a7ee4c23c79 Oct 02 21:23:51 crc kubenswrapper[4636]: W1002 21:23:51.282352 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c1f289f_fb2b_4fd8_a6a3_2c573fff134a.slice/crio-543c399587736b4cad7aba49711c5e01dee3708f1256981bd3d8fad7b46d450b WatchSource:0}: Error finding container 543c399587736b4cad7aba49711c5e01dee3708f1256981bd3d8fad7b46d450b: Status 404 returned error can't find the container with id 543c399587736b4cad7aba49711c5e01dee3708f1256981bd3d8fad7b46d450b Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.321162 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7qm8"] Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.322124 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.325272 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.325348 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.325479 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.328294 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.328468 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.328611 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.334432 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.346134 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://351a619970bd23ab2ef54bbbd68e4971df488c991c2095798a140f906ab7271b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:43Z\\\",\\\"message\\\":\\\"W1002 21:23:33.045885 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1002 21:23:33.046240 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759440213 cert, and key in /tmp/serving-cert-2664177270/serving-signer.crt, /tmp/serving-cert-2664177270/serving-signer.key\\\\nI1002 21:23:33.273101 1 observer_polling.go:159] Starting file observer\\\\nW1002 21:23:33.276952 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1002 21:23:33.279312 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 21:23:33.280498 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2664177270/tls.crt::/tmp/serving-cert-2664177270/tls.key\\\\\\\"\\\\nF1002 21:23:43.479848 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] 
issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.353943 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.354152 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:23:53.354108475 +0000 UTC m=+24.677116494 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.368072 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.386424 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.418498 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceac
count\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.439773 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.451655 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.454943 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-systemd-units\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.454982 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-systemd\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455003 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-log-socket\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455035 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455061 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455080 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455104 4636 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455129 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-var-lib-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455149 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-netns\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455169 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-ovn\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455191 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shwzf\" (UniqueName: \"kubernetes.io/projected/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-kube-api-access-shwzf\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455213 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455234 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-bin\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455241 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455251 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455263 4636 projected.go:288] 
Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455270 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-netd\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455277 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455297 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-kubelet\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455327 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:53.455308445 +0000 UTC m=+24.778316464 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455322 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455360 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455417 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455356 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-node-log\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455483 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455562 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:53.455539262 +0000 UTC m=+24.778547281 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455579 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455597 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:53.455589343 +0000 UTC m=+24.778597362 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455594 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-slash\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455649 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-etc-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.455685 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:53.455662705 +0000 UTC m=+24.778670714 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455705 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-env-overrides\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455726 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovn-node-metrics-cert\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455762 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-ovn-kubernetes\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455779 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-config\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.455800 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-script-lib\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.461519 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.470728 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.484920 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.502423 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.513297 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.526420 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.535980 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556549 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-systemd-units\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556584 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-systemd\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556598 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-log-socket\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 
21:23:51.556631 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556659 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-var-lib-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556678 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-netns\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556693 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-ovn\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556688 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-systemd-units\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556709 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shwzf\" (UniqueName: \"kubernetes.io/projected/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-kube-api-access-shwzf\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556704 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-systemd\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556792 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-bin\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556798 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-var-lib-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556810 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556829 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-netd\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556833 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-log-socket\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556859 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-netd\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556876 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-kubelet\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556882 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-bin\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556911 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-node-log\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556940 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556944 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-slash\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556965 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-node-log\") pod 
\"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556988 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-kubelet\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556970 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-etc-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557010 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-netns\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557027 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-env-overrides\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557035 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-ovn\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557046 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovn-node-metrics-cert\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.556902 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557057 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-slash\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557067 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-ovn-kubernetes\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557088 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-config\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557112 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-script-lib\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557277 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-ovn-kubernetes\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557316 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-etc-openvswitch\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.557538 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-env-overrides\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.558295 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-script-lib\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.558411 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-config\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.560211 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovn-node-metrics-cert\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.579361 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shwzf\" (UniqueName: \"kubernetes.io/projected/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-kube-api-access-shwzf\") pod \"ovnkube-node-l7qm8\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: 
I1002 21:23:51.605397 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.605524 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.605903 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.605953 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.605991 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.606031 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.608220 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.608868 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.609808 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.610581 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.615858 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.616845 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.617819 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.618576 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.619959 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.620495 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.621654 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.622583 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.623661 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.624297 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.625680 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.626302 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.626953 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.629205 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.630040 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.632318 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.632884 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.633450 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.634391 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.635028 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.635060 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.636996 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.637604 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.638605 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.639137 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.640096 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.640661 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.641224 4636 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.641362 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.644905 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.645548 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.646690 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.648603 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.649478 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.650659 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.651502 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.652923 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.653489 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.654265 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.655510 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.656760 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.657357 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.658613 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.659882 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.661392 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.662834 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.663543 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.664711 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" 
path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.665621 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.666287 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.668470 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.756792 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488"} Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.756842 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"6e68fc2ead266ec5c5d1714bb9017d63b978451ff9ac22d1d42f3a7ee4c23c79"} Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.759024 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerStarted","Data":"28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff"} Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.759057 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerStarted","Data":"543c399587736b4cad7aba49711c5e01dee3708f1256981bd3d8fad7b46d450b"} Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.760564 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.763255 4636 scope.go:117] "RemoveContainer" containerID="b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b" Oct 02 21:23:51 crc kubenswrapper[4636]: E1002 21:23:51.763439 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.764474 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"b706a209b6c9797279961e88fcdf992ef0f94f3cadbccc28b39b4a1f4c242af5"} Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.774271 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.787802 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.795669 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.805651 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.816857 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.828893 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.847631 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.873794 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.894306 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.912663 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.935987 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.950371 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.959618 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:51 crc kubenswrapper[4636]: I1002 21:23:51.972566 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceac
count\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 21:23:52 crc kubenswrapper[4636]: E1002 21:23:52.151788 4636 configmap.go:193] Couldn't get configMap openshift-multus/multus-daemon-config: failed to sync configmap cache: timed out waiting for the condition Oct 02 21:23:52 crc kubenswrapper[4636]: E1002 21:23:52.151920 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-daemon-config podName:3a64b152-90d7-4dd0-be73-17e987476a1c nodeName:}" failed. 
No retries permitted until 2025-10-02 21:23:52.651889113 +0000 UTC m=+23.974897142 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "multus-daemon-config" (UniqueName: "kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-daemon-config") pod "multus-895mm" (UID: "3a64b152-90d7-4dd0-be73-17e987476a1c") : failed to sync configmap cache: timed out waiting for the condition Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.498480 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.519437 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.667314 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-daemon-config\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.668093 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3a64b152-90d7-4dd0-be73-17e987476a1c-multus-daemon-config\") pod \"multus-895mm\" (UID: \"3a64b152-90d7-4dd0-be73-17e987476a1c\") " pod="openshift-multus/multus-895mm" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.763841 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-895mm" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.770165 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec"} Oct 02 21:23:52 crc kubenswrapper[4636]: W1002 21:23:52.776487 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a64b152_90d7_4dd0_be73_17e987476a1c.slice/crio-78ba675cbf4ffcadcaadf5240296ccd796e4f50fabb64a5a3e1bf854629f75f6 WatchSource:0}: Error finding container 78ba675cbf4ffcadcaadf5240296ccd796e4f50fabb64a5a3e1bf854629f75f6: Status 404 returned error can't find the container with id 78ba675cbf4ffcadcaadf5240296ccd796e4f50fabb64a5a3e1bf854629f75f6 Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.777587 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c1f289f-fb2b-4fd8-a6a3-2c573fff134a" containerID="28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff" exitCode=0 Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.777696 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerDied","Data":"28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff"} Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.785311 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966"} Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.790463 4636 
generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" exitCode=0 Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.791370 4636 scope.go:117] "RemoveContainer" containerID="b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b" Oct 02 21:23:52 crc kubenswrapper[4636]: E1002 21:23:52.791661 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.791903 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.797277 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.815249 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-2zt4n"] Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.815724 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.816041 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.819280 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.819386 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.819625 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.819864 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.846447 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.868610 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-host\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.868667 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-serviceca\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 
21:23:52.868695 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frpcs\" (UniqueName: \"kubernetes.io/projected/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-kube-api-access-frpcs\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.875591 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.891949 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.909073 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.930512 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.946789 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.960271 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.970269 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-host\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.970324 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-serviceca\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.970360 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frpcs\" (UniqueName: \"kubernetes.io/projected/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-kube-api-access-frpcs\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.971172 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-host\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.972638 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-serviceca\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.973817 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.989961 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting
\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:52 crc kubenswrapper[4636]: I1002 21:23:52.995018 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frpcs\" (UniqueName: \"kubernetes.io/projected/fbf92ff8-8b9e-4235-bf6f-8687e1812deb-kube-api-access-frpcs\") pod \"node-ca-2zt4n\" (UID: \"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\") " pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.006532 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.018651 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.030144 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.046170 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.065344 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.075736 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.088835 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.112033 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"
192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.129633 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.140371 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-2zt4n" Oct 02 21:23:53 crc kubenswrapper[4636]: W1002 21:23:53.152324 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbf92ff8_8b9e_4235_bf6f_8687e1812deb.slice/crio-c074fc7bcec7b4bdc9d950aef70f03135b5d88eeb02eb06ea410401246838623 WatchSource:0}: Error finding container c074fc7bcec7b4bdc9d950aef70f03135b5d88eeb02eb06ea410401246838623: Status 404 returned error can't find the container with id c074fc7bcec7b4bdc9d950aef70f03135b5d88eeb02eb06ea410401246838623 Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.166798 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.201073 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.220772 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.234408 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.296175 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.318977 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.345381 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.372778 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.372970 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:23:57.372930131 +0000 UTC m=+28.695938150 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.473599 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.473658 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.473684 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.473705 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.473809 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.473859 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:57.473844874 +0000 UTC m=+28.796852893 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474037 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474091 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474105 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474128 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474185 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474217 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474230 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474202 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:57.474165423 +0000 UTC m=+28.797173442 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474325 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:57.474301067 +0000 UTC m=+28.797309076 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.474339 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:23:57.474333748 +0000 UTC m=+28.797341767 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.602673 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.602777 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.602828 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.602791 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.603028 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:23:53 crc kubenswrapper[4636]: E1002 21:23:53.603186 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.803853 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerStarted","Data":"93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.803911 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerStarted","Data":"78ba675cbf4ffcadcaadf5240296ccd796e4f50fabb64a5a3e1bf854629f75f6"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.805468 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2zt4n" event={"ID":"fbf92ff8-8b9e-4235-bf6f-8687e1812deb","Type":"ContainerStarted","Data":"af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.805594 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2zt4n" event={"ID":"fbf92ff8-8b9e-4235-bf6f-8687e1812deb","Type":"ContainerStarted","Data":"c074fc7bcec7b4bdc9d950aef70f03135b5d88eeb02eb06ea410401246838623"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.814098 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.814288 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.814352 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.816313 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerStarted","Data":"71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32"}
Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.819413 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.835600 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.851201 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.866246 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.879811 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.898525 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z 
is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.918109 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.930526 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.942232 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.954396 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.968417 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.981153 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:53 crc kubenswrapper[4636]: I1002 21:23:53.990632 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.006582 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.022813 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.039320 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.051059 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.064737 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.076158 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.089124 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.098927 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.117713 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\
":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.136182 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.152310 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.169728 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.187258 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.208065 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.219107 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.824253 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.824326 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} Oct 02 21:23:54 crc kubenswrapper[4636]: I1002 21:23:54.824346 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 
21:23:55.345917 4636 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.348733 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.348860 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.348876 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.348998 4636 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.356431 4636 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.356815 4636 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.357966 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.358004 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.358017 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.358039 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.358054 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.378171 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.382066 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.382111 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.382120 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.382144 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.382157 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.393999 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.397670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.397721 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.397735 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.397776 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.397794 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.408793 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.411925 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.411956 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.411965 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.411981 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.411991 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.423058 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.426025 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.426048 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.426060 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.426075 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.426085 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.442053 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.442189 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.444472 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.444520 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.444532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.444552 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.444565 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.547562 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.547607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.547617 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.547632 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.547642 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.603275 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.603402 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.603301 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.603552 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.603653 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:23:55 crc kubenswrapper[4636]: E1002 21:23:55.604104 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.650248 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.650301 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.650314 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.650333 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.650367 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.753606 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.753669 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.753678 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.753700 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.753709 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.832451 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c1f289f-fb2b-4fd8-a6a3-2c573fff134a" containerID="71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32" exitCode=0 Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.832513 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerDied","Data":"71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.852817 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting 
RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.856875 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.856906 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.856916 4636 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.856933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.856943 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.872057 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.901904 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.919765 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.935736 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.955506 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.960438 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.960487 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.960501 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.960523 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.960539 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:55Z","lastTransitionTime":"2025-10-02T21:23:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.976589 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:55 crc kubenswrapper[4636]: I1002 21:23:55.992223 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:55Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.001927 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.017694 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.039244 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.056507 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.069207 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.069253 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.069292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.069314 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.069325 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.075568 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.092710 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.173678 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.173728 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.173738 4636 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.173778 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.173791 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.277245 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.277584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.277602 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.277625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.277644 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.380464 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.380517 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.380535 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.380571 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.380596 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.487321 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.487366 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.487377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.487398 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.487412 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.592294 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.592346 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.592364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.592386 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.592399 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.695940 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.696014 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.696032 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.696063 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.696082 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.798081 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.798362 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.798374 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.798391 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.798401 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.838993 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c1f289f-fb2b-4fd8-a6a3-2c573fff134a" containerID="c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1" exitCode=0 Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.839084 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerDied","Data":"c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.848128 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.871088 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.891186 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.901294 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.901334 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.901351 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.901372 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.901389 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:56Z","lastTransitionTime":"2025-10-02T21:23:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.910344 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.938554 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.960181 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:56 crc kubenswrapper[4636]: I1002 21:23:56.981784 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.001062 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.005223 4636 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.005286 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.005304 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.005332 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.005357 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.042468 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.075206 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.092271 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.110463 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.110509 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.110518 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.110540 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.110550 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.115483 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.137456 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.161410 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.171519 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.213191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.213238 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.213251 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.213270 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.213281 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.317739 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.317794 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.317806 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.317821 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.317830 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.420642 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.420681 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.420690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.420707 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.420717 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.422209 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.422401 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:24:05.422370698 +0000 UTC m=+36.745378727 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523321 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523381 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523401 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523422 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523513 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523515 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523534 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523578 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:05.523558969 +0000 UTC m=+36.846566988 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523588 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523613 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523645 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.523670 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523596 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:05.523589599 +0000 UTC m=+36.846597618 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523680 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523810 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523827 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523881 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:05.523855327 +0000 UTC m=+36.846863546 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523709 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523912 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523922 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.523950 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:05.523941629 +0000 UTC m=+36.846949878 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.603079 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.603118 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.603193 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.603278 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.603378 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:23:57 crc kubenswrapper[4636]: E1002 21:23:57.603587 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.626310 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.626351 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.626361 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.626379 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.626392 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.729043 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.729090 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.729102 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.729121 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.729134 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.831857 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.832265 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.832393 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.832482 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.832560 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.854915 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c1f289f-fb2b-4fd8-a6a3-2c573fff134a" containerID="e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d" exitCode=0 Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.855121 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerDied","Data":"e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.874607 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.889545 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.903076 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.919461 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.937989 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.938044 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.938056 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.938090 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.938105 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:57Z","lastTransitionTime":"2025-10-02T21:23:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.939999 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.954884 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.972462 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:57 crc kubenswrapper[4636]: I1002 21:23:57.991108 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:57Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.009050 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.024119 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.042531 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.042584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.042597 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.042619 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.042634 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.049473 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.071558 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.094489 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.105200 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.147597 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.148324 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.148346 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.148377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.148398 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.252284 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.252333 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.252343 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.252364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.252379 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.355900 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.356313 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.356441 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.356618 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.356842 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.460840 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.460904 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.460922 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.460950 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.460969 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.564239 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.564486 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.564506 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.564537 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.564556 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.668317 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.668838 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.669034 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.669201 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.669355 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.772571 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.772615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.772625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.772643 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.772654 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.877130 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.877515 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.877532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.878011 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerStarted","Data":"7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.878949 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.878977 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.897058 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.913861 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.932741 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.953864 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.971488 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.981800 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.982126 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.982232 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.982342 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.982434 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:58Z","lastTransitionTime":"2025-10-02T21:23:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:58 crc kubenswrapper[4636]: I1002 21:23:58.987978 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:58Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.004262 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-relea
se\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.019002 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.032383 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.047167 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.061526 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.076139 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.085842 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.085888 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.085908 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.085933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.085951 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.098930 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z 
is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.118737 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.188690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.188761 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.188780 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.188846 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.188863 4636 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.292088 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.292122 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.292135 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.292152 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.292166 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.395091 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.395139 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.395157 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.395184 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.395203 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.499584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.501038 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.501069 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.501097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.501114 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.602987 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.603074 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:23:59 crc kubenswrapper[4636]: E1002 21:23:59.603193 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.603081 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:23:59 crc kubenswrapper[4636]: E1002 21:23:59.603312 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:23:59 crc kubenswrapper[4636]: E1002 21:23:59.603543 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.604723 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.604766 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.604779 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.604798 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.604812 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.632287 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.658026 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.678550 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"
hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.708034 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.708383 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.708510 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.708615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.708694 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.716451 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z 
is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.734923 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.750257 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.766382 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.791438 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.810060 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.812539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.812611 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.812631 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.812661 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.812684 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.828882 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.844811 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.863440 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b4562
4c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.879932 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.889205 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.890067 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.890126 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.895311 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c1f289f-fb2b-4fd8-a6a3-2c573fff134a" containerID="7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b" exitCode=0 Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.895466 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerDied","Data":"7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.908369 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.921183 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.921223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.921235 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.921257 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.921273 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:23:59Z","lastTransitionTime":"2025-10-02T21:23:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.931430 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.931536 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.932838 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.955687 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.973100 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:23:59 crc kubenswrapper[4636]: I1002 21:23:59.988897 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:23:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.001853 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.017405 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.026019 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.026059 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.026070 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.026087 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.026100 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.034005 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.049778 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.063932 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.081277 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.096743 4636 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.119336 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc
/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run
-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.130591 4636 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.130705 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.130792 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.130918 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.131005 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.134151 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.151925 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.172943 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.184921 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.200261 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.212227 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.224738 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.234577 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.234708 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.235141 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.235210 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.235440 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.243744 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.258741 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.277999 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.300825 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.317187 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.334224 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.338433 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.338657 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.338776 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.338857 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.338961 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.350585 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.371993 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.392708 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.443135 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.443180 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.443190 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.443209 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.443224 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.546303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.546350 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.546363 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.546384 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.546398 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.650028 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.650073 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.650087 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.650107 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.650121 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.753128 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.753197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.753213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.753239 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.753256 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.856782 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.856854 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.856869 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.856891 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.856906 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.903258 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c1f289f-fb2b-4fd8-a6a3-2c573fff134a" containerID="09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c" exitCode=0 Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.903330 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerDied","Data":"09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.903454 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.923406 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.962245 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.966444 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.966498 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.966517 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.966546 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.966566 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:00Z","lastTransitionTime":"2025-10-02T21:24:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.981497 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:00 crc kubenswrapper[4636]: I1002 21:24:00.999262 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:00Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.015323 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.031070 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.053046 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.068947 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.068998 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.069018 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.069043 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.069063 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.070035 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.086893 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.101691 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.124611 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.138173 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.153579 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.168147 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.175716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.175795 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.175811 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.175856 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.175873 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.279058 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.279107 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.279120 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.279140 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.279154 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.382832 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.382882 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.382894 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.382914 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.382928 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.485761 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.485802 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.485814 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.485835 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.485849 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.588595 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.588660 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.588681 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.588704 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.588721 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.603840 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.603862 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:01 crc kubenswrapper[4636]: E1002 21:24:01.603980 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:24:01 crc kubenswrapper[4636]: E1002 21:24:01.604072 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.604128 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:01 crc kubenswrapper[4636]: E1002 21:24:01.604175 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.692522 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.692584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.692598 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.692619 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.692631 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.795864 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.795926 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.795940 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.795964 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.795980 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.898527 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.898781 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.898868 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.898986 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.899073 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:01Z","lastTransitionTime":"2025-10-02T21:24:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.912263 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" event={"ID":"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a","Type":"ContainerStarted","Data":"6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f"} Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.912427 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.934351 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.954353 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.970505 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:01 crc kubenswrapper[4636]: I1002 21:24:01.995237 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:01Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.002701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.002734 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.002743 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.002777 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.002787 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.027917 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe7
6a778e54f6d43d4ae6e6b3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.056346 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.079486 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use 
of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.094552 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.106258 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.106301 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.106310 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.106337 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.106348 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.106804 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.119210 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.132085 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.144392 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.156225 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.169192 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.217997 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.218048 4636 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.218057 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.218080 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.218089 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.322974 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.323018 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.323030 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.323056 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.323070 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.425799 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.425864 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.425877 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.425893 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.425909 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.528489 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.528525 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.528535 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.528551 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.528560 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.632385 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.632459 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.632475 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.632502 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.632519 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.735884 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.735952 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.735974 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.736003 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.736026 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.839966 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.840032 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.840052 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.840077 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.840097 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.920356 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/0.log" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.924435 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da" exitCode=1 Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.924481 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.925377 4636 scope.go:117] "RemoveContainer" containerID="bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.944491 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.944554 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.944567 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.944590 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.944603 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:02Z","lastTransitionTime":"2025-10-02T21:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.944653 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.965138 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:02 crc kubenswrapper[4636]: I1002 21:24:02.994231 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:02Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.024241 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.044739 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.047287 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.047352 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.047369 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.047394 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.047412 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.067774 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.116926 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b7964
3df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.138312 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.149978 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.150041 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.150053 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.150077 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.150092 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.159798 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.176918 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.192176 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.213410 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.225678 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\
"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.240784 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.253637 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.253692 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.253704 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.253729 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.253767 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.358191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.358238 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.358250 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.358273 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.358288 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.461659 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.461703 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.461718 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.461741 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.461772 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.564509 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.564551 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.564560 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.564576 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.564585 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.603231 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.603231 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:03 crc kubenswrapper[4636]: E1002 21:24:03.603373 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:03 crc kubenswrapper[4636]: E1002 21:24:03.603429 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.603516 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:03 crc kubenswrapper[4636]: E1002 21:24:03.603714 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.668180 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.668224 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.668235 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.668256 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.668281 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.777090 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.777130 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.777142 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.777159 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.777170 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.848387 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52"] Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.848818 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.850862 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.850984 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.871686 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.881084 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.881132 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.881147 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.881170 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.881188 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.888920 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.891236 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.891374 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.891471 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfrf4\" (UniqueName: \"kubernetes.io/projected/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-kube-api-access-kfrf4\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.891545 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-env-overrides\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.913629 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.930249 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/0.log" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.933172 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.933343 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.933894 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-control
ler-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.951222 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.972227 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.984090 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.984148 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.984161 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.984504 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.984544 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:03Z","lastTransitionTime":"2025-10-02T21:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.987136 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:03Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.992081 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfrf4\" (UniqueName: \"kubernetes.io/projected/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-kube-api-access-kfrf4\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.992131 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-env-overrides\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.992184 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.992245 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.992831 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-env-overrides\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:03 crc kubenswrapper[4636]: I1002 21:24:03.993004 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.001725 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.011461 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.015415 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfrf4\" (UniqueName: \"kubernetes.io/projected/5e87285e-b0c2-4f4a-87b8-9244f8a6daaa-kube-api-access-kfrf4\") pod \"ovnkube-control-plane-749d76644c-kkf52\" (UID: \"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.033575 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe7
6a778e54f6d43d4ae6e6b3da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.045926 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\
"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.058113 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.069084 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.079835 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.087084 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.087136 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.087145 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.087166 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.087177 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.094206 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.105874 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.116978 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.128321 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.141899 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.155663 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.163017 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.172176 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.190369 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\
"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.191774 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.191836 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.191858 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.191890 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.191909 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.212606 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.230087 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.241112 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.265505 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.286725 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.294689 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.294788 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.294814 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.294845 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.294864 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.302996 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.325566 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.342565 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.362496 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"19
2.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"202
5-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0
c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.397912 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.397981 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.397998 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.398023 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.398039 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.500650 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.500684 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.500692 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.500706 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.500716 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.603415 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.603492 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.603518 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.603549 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.603571 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.706512 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.706577 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.706597 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.706629 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.706649 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.809911 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.809953 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.809963 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.809983 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.809996 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.915921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.916022 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.916048 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.916083 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.916106 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:04Z","lastTransitionTime":"2025-10-02T21:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.939745 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" event={"ID":"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa","Type":"ContainerStarted","Data":"c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.939878 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" event={"ID":"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa","Type":"ContainerStarted","Data":"530a4f1efb396ac4f188f4571f8607791afd5e3d6cb65980c4485ffa84eef95d"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.942695 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/1.log" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.943579 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/0.log" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.947615 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6" exitCode=1 Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.947678 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6"} Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.947743 4636 scope.go:117] "RemoveContainer" containerID="bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.953015 4636 scope.go:117] "RemoveContainer" containerID="9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6" Oct 02 21:24:04 crc kubenswrapper[4636]: E1002 21:24:04.953447 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.974466 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-zssg6"] Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.975140 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:04 crc kubenswrapper[4636]: E1002 21:24:04.975211 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.975360 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:04 crc kubenswrapper[4636]: I1002 21:24:04.991446 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:04Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.004080 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckrp6\" (UniqueName: \"kubernetes.io/projected/5e169ed7-2c2e-4623-9f21-330753911ab5-kube-api-access-ckrp6\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.004201 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.014987 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.021124 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.021170 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.021191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.021223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.021244 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.037398 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.055076 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.071294 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.090109 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.104649 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.105428 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckrp6\" (UniqueName: \"kubernetes.io/projected/5e169ed7-2c2e-4623-9f21-330753911ab5-kube-api-access-ckrp6\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.105489 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.105709 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.105877 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:05.605839898 +0000 UTC m=+36.928847927 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.124249 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.124313 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.124333 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.124362 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.124382 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.127291 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckrp6\" (UniqueName: \"kubernetes.io/projected/5e169ed7-2c2e-4623-9f21-330753911ab5-kube-api-access-ckrp6\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.130229 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35
bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: 
[]services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.1
26.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.144995 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.160952 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.177624 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.195333 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.209267 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.228789 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.228863 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.228888 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.228921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.228940 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.232042 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.253782 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.273433 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.293283 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.314716 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.332715 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.332777 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.332790 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.332811 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.332825 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.341341 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.356481 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.375261 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.392152 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.414061 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.435369 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.435415 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.435431 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.435455 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.435469 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.442052 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.455743 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.470092 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.491114 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} 
name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.503159 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.509010 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.509235 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:24:21.509216919 +0000 UTC m=+52.832224938 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.515968 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.528180 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.538021 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.538050 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.538059 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.538076 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.538086 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.603618 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.603987 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.603806 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.604241 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.604461 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.604791 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.610297 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.610342 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.610372 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.610406 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.610429 4636 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610455 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610573 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:21.610541103 +0000 UTC m=+52.933549152 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610569 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610617 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610615 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610636 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610649 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610651 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610665 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610684 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-02 21:24:21.610650476 +0000 UTC m=+52.933658535 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610569 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610719 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:21.610704388 +0000 UTC m=+52.933712437 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610772 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:21.610733229 +0000 UTC m=+52.933741278 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.610814 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:06.6107912 +0000 UTC m=+37.933799259 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.641067 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.641119 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.641138 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.641166 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.641186 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.646877 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.647045 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.647183 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.647273 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.647340 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.663064 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.667879 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.667939 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.667959 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.667985 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.668001 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.685102 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.691011 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.691308 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.691434 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.691536 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.691630 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.714394 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.720448 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.720504 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.720522 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.720548 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.720568 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.747009 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.753585 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.753682 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.753817 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.753859 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.753887 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.779259 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: E1002 21:24:05.779627 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.781974 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.782045 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.782074 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.782104 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.782126 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.885708 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.885811 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.885838 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.885874 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.885897 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.955356 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/1.log" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.963680 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" event={"ID":"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa","Type":"ContainerStarted","Data":"b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262"} Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.985121 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:05Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.990900 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.990998 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.991055 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.991097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:05 crc kubenswrapper[4636]: I1002 21:24:05.991123 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:05Z","lastTransitionTime":"2025-10-02T21:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.011622 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.027399 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.047883 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.070991 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} 
name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.087358 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.095130 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.095197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.095210 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.095229 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.095243 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.106122 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.123108 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc 
kubenswrapper[4636]: I1002 21:24:06.138494 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.155404 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.169197 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.182175 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.198292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.198471 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.198534 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.198604 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.198679 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.202893 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.220143 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.240784 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.261522 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:06Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.302327 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.302389 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.302409 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.302439 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.302458 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.406495 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.406833 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.406946 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.407068 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.407150 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.511722 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.511830 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.511856 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.511887 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.511907 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.602937 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:06 crc kubenswrapper[4636]: E1002 21:24:06.603124 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.604343 4636 scope.go:117] "RemoveContainer" containerID="b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.615961 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.616030 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.616054 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.616088 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.616116 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.622513 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:06 crc kubenswrapper[4636]: E1002 21:24:06.622652 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:06 crc kubenswrapper[4636]: E1002 21:24:06.622712 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:08.622692981 +0000 UTC m=+39.945701010 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.719037 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.719093 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.719112 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.719141 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.719160 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.821566 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.821609 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.821623 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.821642 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.821656 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.926080 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.926161 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.926179 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.926250 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.926275 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:06Z","lastTransitionTime":"2025-10-02T21:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.976267 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 02 21:24:06 crc kubenswrapper[4636]: I1002 21:24:06.980799 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.003688 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 
21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.022736 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.030433 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.030866 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.031099 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.031127 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.031244 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.040683 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.052695 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.066646 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.086170 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} 
name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.095887 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.116315 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.128568 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator
@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.134326 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.134372 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.134408 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.134434 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.134449 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.143639 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.155855 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.168969 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.181735 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.194988 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.208640 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.224401 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:07Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.237721 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.237829 4636 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.237854 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.237889 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.237914 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.309124 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.340542 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.340672 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.340694 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.340777 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.340801 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.443385 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.443464 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.443477 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.443500 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.443533 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.546109 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.546223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.546248 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.546287 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.546312 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.603674 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.603674 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.603858 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:07 crc kubenswrapper[4636]: E1002 21:24:07.604356 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:07 crc kubenswrapper[4636]: E1002 21:24:07.605515 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:07 crc kubenswrapper[4636]: E1002 21:24:07.605617 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.649742 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.649812 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.649823 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.649843 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.649856 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.751742 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.751785 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.751793 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.751807 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.751815 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.854489 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.854537 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.854545 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.854562 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.854573 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.958337 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.958387 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.958396 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.958413 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:07 crc kubenswrapper[4636]: I1002 21:24:07.958422 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:07Z","lastTransitionTime":"2025-10-02T21:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.061037 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.061087 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.061097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.061118 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.061130 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.163299 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.163693 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.163894 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.164044 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.164184 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.268103 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.268172 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.268191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.268221 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.268240 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.370323 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.370391 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.370446 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.370467 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.370481 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.473800 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.473873 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.473892 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.473931 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.473958 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.578255 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.578606 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.578624 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.578675 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.578694 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.603522 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:08 crc kubenswrapper[4636]: E1002 21:24:08.603791 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.646090 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:08 crc kubenswrapper[4636]: E1002 21:24:08.646324 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:08 crc kubenswrapper[4636]: E1002 21:24:08.646493 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:12.646456904 +0000 UTC m=+43.969465063 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.681743 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.681873 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.681893 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.681921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.681940 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.784952 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.785028 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.785045 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.785075 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.785094 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.888874 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.888965 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.888992 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.889023 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.889045 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.995804 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.995873 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.995907 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.995941 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:08 crc kubenswrapper[4636]: I1002 21:24:08.995963 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:08Z","lastTransitionTime":"2025-10-02T21:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.099569 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.099639 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.099658 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.099689 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.099714 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.202979 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.203046 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.203064 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.203091 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.203111 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.305735 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.305850 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.305873 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.305900 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.305918 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.409907 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.409983 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.410010 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.410045 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.410068 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.513623 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.513684 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.513701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.513729 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.513816 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.603020 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:09 crc kubenswrapper[4636]: E1002 21:24:09.603225 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.603262 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:09 crc kubenswrapper[4636]: E1002 21:24:09.603421 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.603657 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:09 crc kubenswrapper[4636]: E1002 21:24:09.603928 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.616869 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.616941 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.616960 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.616989 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.617007 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.625615 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.634444 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.635694 4636 
scope.go:117] "RemoveContainer" containerID="9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6" Oct 02 21:24:09 crc kubenswrapper[4636]: E1002 21:24:09.635972 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.650227 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 
shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.674700 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.697928 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.720847 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.720907 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.720926 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.720953 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.720974 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.721501 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.747044 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.766974 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.783855 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.804017 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708
c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.838688 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.838740 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.838765 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.838785 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.838794 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.847686 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.881190 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.913674 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.927372 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.939885 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.941481 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.941508 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.941517 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.941532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.941543 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:09Z","lastTransitionTime":"2025-10-02T21:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.959700 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35
bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfda02d284180239bcc111d8f250bd1b47666fe76a778e54f6d43d4ae6e6b3da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:02Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:140\\\\nI1002 21:24:02.198121 5833 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 21:24:02.198130 5833 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 21:24:02.198136 5833 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 21:24:02.198142 5833 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1002 21:24:02.198149 5833 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1002 21:24:02.198203 5833 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 21:24:02.198115 5833 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 21:24:02.198562 5833 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.198808 5833 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 21:24:02.199190 5833 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1002 21:24:02.199595 5833 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: 
[]services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.1
26.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.972204 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:09 crc kubenswrapper[4636]: I1002 21:24:09.986406 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:09Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.003837 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.017834 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.030331 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.043712 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.044049 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.044094 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.044123 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.044145 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.044157 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.055874 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.066872 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.078012 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.092342 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.114519 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.147349 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.147391 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.147399 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.147419 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.147431 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.148199 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35
bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.163991 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.179823 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.192893 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.207719 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.228298 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:10Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.249935 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.249978 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.249989 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.250007 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.250024 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.353376 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.353423 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.353432 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.353447 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.353457 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.456551 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.456593 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.456601 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.456615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.456625 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.559873 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.559942 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.559966 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.560000 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.560027 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.603632 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:10 crc kubenswrapper[4636]: E1002 21:24:10.603902 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.662809 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.662850 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.662858 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.662874 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.662886 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.765656 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.765696 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.765705 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.765721 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.765731 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.868087 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.868134 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.868143 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.868163 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.868174 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.970808 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.970848 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.970861 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.970878 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:10 crc kubenswrapper[4636]: I1002 21:24:10.970889 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:10Z","lastTransitionTime":"2025-10-02T21:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.073477 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.073514 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.073523 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.073539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.073550 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.175924 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.175972 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.175981 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.175997 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.176008 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.278051 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.278179 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.278197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.278214 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.278226 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.381055 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.381122 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.381141 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.381570 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.381615 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.484393 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.484448 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.484464 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.484490 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.484510 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.587554 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.587664 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.587696 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.587728 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.587778 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.602999 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.603050 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:11 crc kubenswrapper[4636]: E1002 21:24:11.603135 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.603003 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:11 crc kubenswrapper[4636]: E1002 21:24:11.603285 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:11 crc kubenswrapper[4636]: E1002 21:24:11.603404 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.694192 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.694239 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.694250 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.694269 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.694281 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.796781 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.796816 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.796824 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.796838 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.796848 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.899296 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.899355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.899376 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.899406 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:11 crc kubenswrapper[4636]: I1002 21:24:11.899430 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:11Z","lastTransitionTime":"2025-10-02T21:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.001352 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.001390 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.001401 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.001418 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.001428 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.104227 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.104289 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.104312 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.104342 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.104369 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.206569 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.206618 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.206655 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.206675 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.206685 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.309293 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.309327 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.309339 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.309356 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.309367 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.412110 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.412153 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.412164 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.412219 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.412237 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.514879 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.514928 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.514954 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.514971 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.514995 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.602724 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:12 crc kubenswrapper[4636]: E1002 21:24:12.602872 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.617673 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.617711 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.617722 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.617738 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.617826 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.704264 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:12 crc kubenswrapper[4636]: E1002 21:24:12.704422 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:12 crc kubenswrapper[4636]: E1002 21:24:12.704494 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:20.704476477 +0000 UTC m=+52.027484496 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.720290 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.720321 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.720329 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.720344 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.720353 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.823227 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.823269 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.823283 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.823302 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.823313 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.925726 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.925782 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.925795 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.925812 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:12 crc kubenswrapper[4636]: I1002 21:24:12.925822 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:12Z","lastTransitionTime":"2025-10-02T21:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.028641 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.028681 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.028689 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.028705 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.028714 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.131111 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.131143 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.131151 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.131165 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.131174 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.233166 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.233214 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.233232 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.233253 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.233266 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.335254 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.335297 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.335309 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.335327 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.335339 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.437838 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.437903 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.437915 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.437934 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.437947 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.540413 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.540451 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.540463 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.540482 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.540493 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.603332 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.603408 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.603463 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:13 crc kubenswrapper[4636]: E1002 21:24:13.603570 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:13 crc kubenswrapper[4636]: E1002 21:24:13.603771 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:13 crc kubenswrapper[4636]: E1002 21:24:13.603896 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.642900 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.642946 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.642955 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.642974 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.642987 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.745959 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.745996 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.746006 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.746024 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.746036 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.848391 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.848429 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.848438 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.848454 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.848464 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.951258 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.951292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.951303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.951322 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:13 crc kubenswrapper[4636]: I1002 21:24:13.951334 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:13Z","lastTransitionTime":"2025-10-02T21:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.053534 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.053603 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.053621 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.053649 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.053668 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.157555 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.157604 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.157620 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.157646 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.157663 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.260070 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.260356 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.260451 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.260536 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.260616 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.363937 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.364000 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.364019 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.364046 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.364063 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.466553 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.466602 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.466618 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.466643 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.466660 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.569907 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.570253 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.570389 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.570530 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.570658 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.602672 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:14 crc kubenswrapper[4636]: E1002 21:24:14.602853 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.673589 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.673651 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.673673 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.673707 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.673730 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.776042 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.776072 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.776105 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.776122 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.776132 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.879221 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.879256 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.879273 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.879293 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.879304 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.982870 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.983863 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.984034 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.984218 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:14 crc kubenswrapper[4636]: I1002 21:24:14.984386 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:14Z","lastTransitionTime":"2025-10-02T21:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.086830 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.087432 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.087587 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.087775 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.087923 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.191067 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.191100 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.191108 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.191122 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.191132 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.294082 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.294119 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.294129 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.294146 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.294159 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.396457 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.396510 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.396521 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.396539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.396548 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.499054 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.499304 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.499405 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.499494 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.499562 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.605710 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:15 crc kubenswrapper[4636]: E1002 21:24:15.605986 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606009 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606107 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:15 crc kubenswrapper[4636]: E1002 21:24:15.606243 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606291 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606329 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606375 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.606393 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: E1002 21:24:15.606424 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.708458 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.708496 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.708509 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.708546 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.708558 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.811395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.811431 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.811443 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.811462 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.811475 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.914290 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.914357 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.914378 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.914410 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:15 crc kubenswrapper[4636]: I1002 21:24:15.914432 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:15Z","lastTransitionTime":"2025-10-02T21:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.018084 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.018123 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.018137 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.018157 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.018170 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.115142 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.115186 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.115198 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.115216 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.115227 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.129835 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:16Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.134281 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.134319 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.134331 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.134349 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.134364 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.146304 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:16Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.150674 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.150704 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.150712 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.150727 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.150737 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.165131 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:16Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.168792 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.168865 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.168878 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.168904 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.168917 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.186178 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...image list elided; byte-identical duplicate of the previous patch attempt's payload...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:16Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.190312 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.190366 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.190378 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.190416 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.190431 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.202956 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...image list elided; byte-identical duplicate of the first patch attempt's payload...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:16Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.203088 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.204411 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.204470 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.204481 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.204498 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.204512 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.306983 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.307324 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.307421 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.307522 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.307635 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.409908 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.409947 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.409961 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.409980 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.409994 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.511712 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.511767 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.511780 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.511800 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.511814 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.603472 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:16 crc kubenswrapper[4636]: E1002 21:24:16.603592 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.614026 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.614071 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.614080 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.614103 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.614114 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.716838 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.716876 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.716891 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.716908 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.716919 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.819268 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.819303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.819311 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.819327 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.819338 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.921481 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.921545 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.921570 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.921600 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:16 crc kubenswrapper[4636]: I1002 21:24:16.921622 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:16Z","lastTransitionTime":"2025-10-02T21:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.023272 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.023318 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.023328 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.023345 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.023354 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.125515 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.125553 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.125564 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.125582 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.125593 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.228163 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.228206 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.228217 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.228235 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.228244 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.313835 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.326802 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.330518 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.330548 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.330556 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.330570 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.330580 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.338239 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.348379 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.359268 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.374837 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.385801 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.407848 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708
c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.430601 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.432972 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.433134 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.433433 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.433655 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.433926 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.447091 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.463599 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.478662 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.501081 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.531981 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} 
name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.537135 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.537436 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.537634 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.537871 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.538049 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.545805 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.558972 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 
21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.569866 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:17Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.604034 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:17 crc kubenswrapper[4636]: E1002 21:24:17.604253 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.604540 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:17 crc kubenswrapper[4636]: E1002 21:24:17.604664 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.604853 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:17 crc kubenswrapper[4636]: E1002 21:24:17.605067 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.641540 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.641948 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.642028 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.642094 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.642164 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.745387 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.745449 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.745465 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.745492 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.745510 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.848561 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.848868 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.848957 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.849043 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.849127 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.952530 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.952603 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.952629 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.952665 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:17 crc kubenswrapper[4636]: I1002 21:24:17.952690 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:17Z","lastTransitionTime":"2025-10-02T21:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.055668 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.055703 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.055712 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.055728 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.055739 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.157840 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.157886 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.157900 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.157921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.157936 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.260274 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.260334 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.260352 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.260379 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.260396 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.363213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.363292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.363315 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.363342 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.363359 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.466324 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.466420 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.466439 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.466466 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.466484 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.568442 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.568479 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.568487 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.568502 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.568511 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.602951 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:18 crc kubenswrapper[4636]: E1002 21:24:18.603351 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.671108 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.671162 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.671172 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.671189 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.671199 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.773414 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.773776 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.773939 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.774045 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.774108 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.876597 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.876663 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.876682 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.876709 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.876726 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.979846 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.980190 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.980258 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.980329 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:18 crc kubenswrapper[4636]: I1002 21:24:18.980397 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:18Z","lastTransitionTime":"2025-10-02T21:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.083066 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.083103 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.083113 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.083132 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.083142 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.185701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.185979 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.186014 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.186045 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.186070 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.288911 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.288941 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.288951 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.288985 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.288994 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.391148 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.391185 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.391193 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.391210 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.391219 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.493691 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.493958 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.494096 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.494191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.494268 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.597476 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.597549 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.597572 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.597603 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.597643 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.603644 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.603731 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:19 crc kubenswrapper[4636]: E1002 21:24:19.603878 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.603933 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:19 crc kubenswrapper[4636]: E1002 21:24:19.603986 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:19 crc kubenswrapper[4636]: E1002 21:24:19.604133 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.619264 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.632562 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.644432 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.656388 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.666573 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.682954 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.700378 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.700629 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.700691 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.700781 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.701029 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.709712 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35
bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.723669 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.738009 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.749314 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.765543 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.778370 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.791877 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.804360 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.804411 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.804419 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.804437 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.804447 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.806095 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.814812 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.829224 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:19Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.907675 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.907732 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.907742 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.907777 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:19 crc kubenswrapper[4636]: I1002 21:24:19.907789 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:19Z","lastTransitionTime":"2025-10-02T21:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.010771 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.010814 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.010822 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.010837 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.010848 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.113064 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.113107 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.113139 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.113159 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.113172 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.215982 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.216035 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.216052 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.216075 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.216091 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.318565 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.318615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.318629 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.318650 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.318662 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.421918 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.421974 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.421986 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.422005 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.422017 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.525263 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.525333 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.525350 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.525377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.525397 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.602905 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:20 crc kubenswrapper[4636]: E1002 21:24:20.603073 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.627498 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.627585 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.627627 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.627648 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.627662 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.731007 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.731082 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.731099 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.731124 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.731172 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.781265 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:20 crc kubenswrapper[4636]: E1002 21:24:20.781805 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:20 crc kubenswrapper[4636]: E1002 21:24:20.782014 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:36.781979073 +0000 UTC m=+68.104987122 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.834557 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.834623 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.834645 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.834677 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.834702 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.937826 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.937878 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.937898 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.937925 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:20 crc kubenswrapper[4636]: I1002 21:24:20.937943 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:20Z","lastTransitionTime":"2025-10-02T21:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.041874 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.041938 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.041955 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.041981 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.042001 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.145443 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.145504 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.145522 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.145550 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.145570 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.248842 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.248911 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.248929 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.248954 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.248973 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.352341 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.352414 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.352436 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.352464 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.352484 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.456031 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.456099 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.456124 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.456156 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.456195 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.560105 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.560205 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.560231 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.560267 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.560292 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.592067 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.592334 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:24:53.592297207 +0000 UTC m=+84.915305266 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.603164 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.603180 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.603239 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.603388 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.603443 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.603337 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.663557 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.663591 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.663604 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.663634 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.663652 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.693517 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.693589 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.693628 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.693684 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.693887 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.693967 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:53.693944559 +0000 UTC m=+85.016952588 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694025 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694064 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694080 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694147 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:53.694129965 +0000 UTC m=+85.017137974 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694207 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694253 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:53.694239738 +0000 UTC m=+85.017247887 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694348 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694381 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694397 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:21 crc kubenswrapper[4636]: E1002 21:24:21.694435 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:24:53.694422833 +0000 UTC m=+85.017430982 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.767414 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.767459 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.767470 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.767488 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.767500 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.824077 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.842855 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.847526 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.865727 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.883805 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.883866 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.883884 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.883915 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.883934 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.886574 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.903849 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.921648 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.945508 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.965404 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.983980 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:21Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.987504 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.987558 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.987576 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.987601 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:21 crc kubenswrapper[4636]: I1002 21:24:21.987616 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:21Z","lastTransitionTime":"2025-10-02T21:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.005830 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.020234 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.041782 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.063046 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad060
6ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} 
name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.073887 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.086820 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 
21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.091069 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.091110 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.091124 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.091143 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.091159 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.104824 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.122589 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:22Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.194380 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.194438 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.194454 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.194476 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.194494 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.297848 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.298241 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.298443 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.298619 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.298803 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.401508 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.401599 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.401625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.401666 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.401696 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.504023 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.504096 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.504117 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.504145 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.504165 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.603141 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:22 crc kubenswrapper[4636]: E1002 21:24:22.603621 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.607557 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.607622 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.607641 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.607673 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.607692 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.712849 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.712885 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.712895 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.712935 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.712950 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.815332 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.815377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.815388 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.815408 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.815420 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.918328 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.918391 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.918410 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.918433 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:22 crc kubenswrapper[4636]: I1002 21:24:22.918449 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:22Z","lastTransitionTime":"2025-10-02T21:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.021082 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.021115 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.021123 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.021138 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.021147 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.123968 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.124015 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.124031 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.124056 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.124074 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.226960 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.227000 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.227011 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.227030 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.227041 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.329985 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.330018 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.330027 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.330044 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.330054 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.432910 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.432945 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.432956 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.432978 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.432989 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.535980 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.536059 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.536079 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.536107 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.536126 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.603312 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:23 crc kubenswrapper[4636]: E1002 21:24:23.603639 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.604133 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:23 crc kubenswrapper[4636]: E1002 21:24:23.604402 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.604466 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:23 crc kubenswrapper[4636]: E1002 21:24:23.604626 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.638531 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.638580 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.638595 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.638616 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.638633 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.741202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.741237 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.741248 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.741266 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.741279 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.843848 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.843881 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.843890 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.843904 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.843911 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.947325 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.947382 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.947399 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.947424 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:23 crc kubenswrapper[4636]: I1002 21:24:23.947441 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:23Z","lastTransitionTime":"2025-10-02T21:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.049364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.049406 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.049417 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.049435 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.049446 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.152428 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.152476 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.152488 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.152510 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.152523 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.255099 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.255147 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.255160 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.255180 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.255194 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.359007 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.359060 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.359071 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.359089 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.359102 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.461292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.461329 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.461339 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.461355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.461365 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.564553 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.564605 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.564619 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.564643 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.564656 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.603601 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:24 crc kubenswrapper[4636]: E1002 21:24:24.603790 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.605271 4636 scope.go:117] "RemoveContainer" containerID="9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.667307 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.667592 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.667605 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.667625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.667638 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.769595 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.769632 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.769646 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.769670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.769682 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.872933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.872996 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.873018 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.873044 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.873065 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.975638 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.975679 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.975690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.975708 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:24 crc kubenswrapper[4636]: I1002 21:24:24.975722 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:24Z","lastTransitionTime":"2025-10-02T21:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.049089 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/1.log" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.054188 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb"} Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.054683 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.080200 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.080272 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.080291 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.080317 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.080335 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
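Every entry above traces back to a single fault: the kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/, so it keeps publishing a Ready=False node condition and refuses to create new pod sandboxes. The following is a minimal Go sketch of that readiness decision, assuming only the condition shape printed by setters.go above; it is an illustration, not kubelet source, and the file-extension globs are an assumption borrowed from common CNI packaging rather than kubelet's exact matching rules.

package main

import (
        "encoding/json"
        "fmt"
        "os"
        "path/filepath"
        "time"
)

// nodeCondition mirrors the fields visible in the "Node became not ready"
// entries above (type/status/lastHeartbeatTime/lastTransitionTime/reason/message).
type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
}

// hasCNIConf reports whether any CNI configuration file exists in dir.
// The .conf/.conflist/.json patterns are an assumption for illustration.
func hasCNIConf(dir string) bool {
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
                if matches, _ := filepath.Glob(filepath.Join(dir, pat)); len(matches) > 0 {
                        return true
                }
        }
        return false
}

func main() {
        dir := "/etc/kubernetes/cni/net.d"
        if hasCNIConf(dir) {
                fmt.Println("CNI config present; node can report Ready=True")
                return
        }
        // No CNI config: emit the same Ready=False condition seen in the log.
        now := time.Now().UTC().Format(time.RFC3339)
        cond := nodeCondition{
                Type:               "Ready",
                Status:             "False",
                LastHeartbeatTime:  now,
                LastTransitionTime: now,
                Message: "container runtime network not ready: NetworkReady=false " +
                        "reason:NetworkPluginNotReady message:Network plugin returns error: " +
                        "no CNI configuration file in " + dir + "/. Has your network provider started?",
                Reason: "KubeletNotReady",
        }
        b, err := json.Marshal(cond)
        if err != nil {
                fmt.Fprintln(os.Stderr, err)
                os.Exit(1)
        }
        fmt.Println(string(b))
}

Run on a node in this state, the sketch would print the same Ready=False condition JSON that recurs throughout this log; once the network operator writes a conflist into the directory, the check flips and the kubelet stops repeating the block.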
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.081098 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.115586 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.144484 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.183504 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.183544 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.183560 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.183586 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.183604 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.186989 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.213667 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b7964
3df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.228569 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.255256 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.273193 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.286366 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.286815 4636 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.286875 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.286896 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.286927 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.286949 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.303733 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb80
64d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} 
name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.312598 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.321826 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 
21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.339829 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.353865 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.370490 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.384326 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.389448 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 
21:24:25.389524 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.389537 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.389583 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.389597 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.399454 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-
cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.492343 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.492386 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.492395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.492414 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.492423 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.595262 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.595332 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.595347 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.595370 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.595386 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.603585 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.603647 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.603611 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:25 crc kubenswrapper[4636]: E1002 21:24:25.603777 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:25 crc kubenswrapper[4636]: E1002 21:24:25.603865 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:25 crc kubenswrapper[4636]: E1002 21:24:25.603931 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.698015 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.698058 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.698067 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.698087 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.698098 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.801285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.801326 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.801337 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.801355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.801366 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.903814 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.903858 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.903870 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.903887 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:25 crc kubenswrapper[4636]: I1002 21:24:25.903899 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:25Z","lastTransitionTime":"2025-10-02T21:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.005990 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.006031 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.006042 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.006060 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.006072 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
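The kubelet keeps flipping the node back to NotReady because /etc/kubernetes/cni/net.d/ holds no CNI configuration yet; ovnkube-controller, which would write it, is crash-looping in the entries below. A rough check of that directory, mirroring the kubelet's complaint (path taken from the log message; the extension list is an assumption about typical CNI config names):

```python
import os

CNI_DIR = "/etc/kubernetes/cni/net.d/"  # directory named in the log message

try:
    entries = sorted(os.listdir(CNI_DIR))
except FileNotFoundError:
    entries = []

# CNI configs are conventionally *.conf / *.conflist / *.json
conf_files = [e for e in entries if e.endswith((".conf", ".conflist", ".json"))]
if conf_files:
    print("CNI config present:", conf_files)
else:
    print("no CNI configuration file in", CNI_DIR, "- network plugin not ready")
```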
Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.059195 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/2.log" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.060170 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/1.log" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.063283 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb" exitCode=1 Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.063314 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.063370 4636 scope.go:117] "RemoveContainer" containerID="9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.067182 4636 scope.go:117] "RemoveContainer" containerID="cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb" Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.067559 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.089942 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb80
64d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9f4854a87d578c56d86fddecc76d1aab39d35bf1b98c3ef2ae2712e9524d6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"lane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1002 21:24:03.907196 6016 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907210 6016 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1002 21:24:03.907190 6016 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-scheduler-operator/metrics]} name:Service_openshift-kube-scheduler-o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording 
success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.101958 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.109262 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.109325 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.109339 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.109358 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.109369 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.115297 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" 
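The pod status patches in these entries are logged as Go %q-escaped strings, which makes them nearly unreadable inline. A small sketch that strips one escaping layer and pretty-prints the JSON; PATCH is a stand-in snippet (the uid is the ovnkube-control-plane pod's uid from the entry above), and note the archived log carries extra escaping layers beyond the single one handled here:

```python
import codecs
import json

# Stand-in for a fragment copied out of a log line, with one layer of
# backslash escaping left to remove.
PATCH = '{\\"metadata\\":{\\"uid\\":\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\"}}'

decoded = codecs.decode(PATCH, "unicode_escape")  # strip one escaping layer
print(json.dumps(json.loads(decoded), indent=2))
```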
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.130372 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recove
ry-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.145615 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
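The ovnkube-controller restarts and the rising restartCount values in these patches follow the kubelet's crash-loop schedule: the delay roughly doubles per failed restart from a 10s base up to a 5-minute cap (kubelet default values, stated here as an assumption), which is consistent with the "back-off 20s" message at restartCount 2 earlier in the log. A toy reproduction of that schedule:

```python
BASE_S, CAP_S = 10, 300  # assumed kubelet defaults: 10s initial, 5min cap

delay = BASE_S
for restart in range(1, 8):
    print(f"after crash {restart}: CrashLoopBackOff {delay}s")
    delay = min(delay * 2, CAP_S)  # doubles each time until the cap
```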
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.162699 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.173446 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.187486 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.198429 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc 
kubenswrapper[4636]: I1002 21:24:26.211592 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.211891 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.212181 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.212414 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.216460 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.218737 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 
UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.232485 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.249173 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.265362 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.280459 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.298645 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.314028 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.319144 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.319403 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.319549 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.319689 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.319839 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.328182 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.422190 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.422220 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.422228 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.422242 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.422250 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.452191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.452402 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.452507 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.452598 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.452687 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.467094 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.471721 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.471782 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.471798 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.471819 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.471836 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.486060 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.489938 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.490003 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.490019 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.490041 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.490056 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.501600 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.505267 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.505303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.505312 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.505331 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.505344 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.516026 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.519316 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.519358 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.519373 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.519397 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.519416 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.531993 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:26Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.532143 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.533546 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.533584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.533602 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.533624 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.533663 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.602785 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:26 crc kubenswrapper[4636]: E1002 21:24:26.602950 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.636029 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.636079 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.636092 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.636112 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.636127 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.738921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.738960 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.738969 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.738988 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.738998 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.841608 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.841641 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.841652 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.841669 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.841679 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.944341 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.944615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.944679 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.944778 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:26 crc kubenswrapper[4636]: I1002 21:24:26.944869 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:26Z","lastTransitionTime":"2025-10-02T21:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.046991 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.047241 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.047314 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.047380 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.047445 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.067861 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/2.log" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.071065 4636 scope.go:117] "RemoveContainer" containerID="cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb" Oct 02 21:24:27 crc kubenswrapper[4636]: E1002 21:24:27.071198 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.084119 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.102965 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.116664 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.127406 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.138386 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.149790 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.149859 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.149882 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.149914 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.149939 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.153971 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.165947 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.179356 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.191651 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.212052 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.228023 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.242936 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.251979 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 
21:24:27.252021 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.252037 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.252059 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.252071 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.258317 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-
cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.279950 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb80
64d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.292997 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.302977 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.315270 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name
\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:27Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.353917 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.353975 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.353989 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.354007 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.354042 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.456963 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.456993 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.457021 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.457038 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.457049 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.559921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.559990 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.560015 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.560051 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.560074 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.603181 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.603218 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:27 crc kubenswrapper[4636]: E1002 21:24:27.603327 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.603392 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:27 crc kubenswrapper[4636]: E1002 21:24:27.603447 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:27 crc kubenswrapper[4636]: E1002 21:24:27.603542 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.662855 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.662922 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.662946 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.662977 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.663000 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.766126 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.766180 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.766195 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.766222 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.766240 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.869196 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.869257 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.869281 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.869311 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.869328 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.972048 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.972126 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.972148 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.972178 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:27 crc kubenswrapper[4636]: I1002 21:24:27.972198 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:27Z","lastTransitionTime":"2025-10-02T21:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.074747 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.074840 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.074857 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.074883 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.074902 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.178133 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.178207 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.178226 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.178252 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.178269 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.281228 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.281288 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.281307 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.281334 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.281351 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.385104 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.385184 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.385201 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.385231 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.385279 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.487712 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.487798 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.487816 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.487841 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.487859 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.590925 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.591021 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.591038 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.591097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.591120 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.603700 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:24:28 crc kubenswrapper[4636]: E1002 21:24:28.603999 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.693954 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.694020 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.694044 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.694074 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.694096 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.796577 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.796615 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.796625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.796643 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.796654 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.899228 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.899272 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.899288 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.899310 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:28 crc kubenswrapper[4636]: I1002 21:24:28.899326 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:28Z","lastTransitionTime":"2025-10-02T21:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.002305 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.002697 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.003001 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.003214 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.003412 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.106097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.106185 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.106205 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.106234 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.106251 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.208810 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.208887 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.208906 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.208933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.208951 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.311630 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.311693 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.311710 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.311737 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.311783 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.414174 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.414239 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.414256 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.414282 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.414299 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.516613 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.516653 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.516664 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.516680 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.516691 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.603519 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.603702 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.603815 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:29 crc kubenswrapper[4636]: E1002 21:24:29.604269 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:24:29 crc kubenswrapper[4636]: E1002 21:24:29.604366 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:29 crc kubenswrapper[4636]: E1002 21:24:29.604528 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.619089 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.619132 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.619143 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.619159 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.619170 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.623159 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.646465 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.662394 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.678027 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.691709 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.714836 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.721493 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.721531 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.721542 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.721586 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.721598 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.734320 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.748290 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.761572 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.773510 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.798146 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.824420 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.824457 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.824466 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.824486 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.824495 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.844342 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb80
64d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.862688 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.875189 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.886285 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name
\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.905570 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.915658 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:29Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.926510 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.926623 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.926690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.926785 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:29 crc kubenswrapper[4636]: I1002 21:24:29.926869 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:29Z","lastTransitionTime":"2025-10-02T21:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.029690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.029740 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.029774 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.029795 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.029807 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.133285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.133532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.133674 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.133820 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.133924 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.236685 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.236711 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.236719 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.236761 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.236772 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.339898 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.339961 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.339977 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.340002 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.340020 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.443358 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.443465 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.443491 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.443523 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.443547 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.546533 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.546582 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.546599 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.546621 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.546635 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.603257 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:30 crc kubenswrapper[4636]: E1002 21:24:30.603410 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.648461 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.648670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.648738 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.648822 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.648877 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.751590 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.751994 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.752133 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.752302 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.752467 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.856131 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.856168 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.856176 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.856191 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.856200 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.958980 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.959316 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.959684 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.960016 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:30 crc kubenswrapper[4636]: I1002 21:24:30.960378 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:30Z","lastTransitionTime":"2025-10-02T21:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.064086 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.064456 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.064679 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.064914 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.065121 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.167607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.168041 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.168286 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.168690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.169127 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.273269 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.273383 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.273406 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.273436 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.273457 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.379236 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.379328 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.379349 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.379381 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.379401 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.482439 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.482475 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.482483 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.482519 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.482530 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.585292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.585366 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.585385 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.585416 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.585436 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.603008 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.603067 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.603091 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:31 crc kubenswrapper[4636]: E1002 21:24:31.603232 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:31 crc kubenswrapper[4636]: E1002 21:24:31.603398 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:31 crc kubenswrapper[4636]: E1002 21:24:31.603577 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.717423 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.717503 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.717521 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.718254 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.718325 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.822328 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.822379 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.822390 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.822408 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.822420 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.925368 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.925435 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.925449 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.925466 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:31 crc kubenswrapper[4636]: I1002 21:24:31.925477 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:31Z","lastTransitionTime":"2025-10-02T21:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.028004 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.028046 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.028056 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.028076 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.028089 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.130788 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.130821 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.130831 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.130848 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.130860 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.233559 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.233614 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.233627 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.233645 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.233707 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.337164 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.337236 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.337278 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.337297 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.337309 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.440291 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.440353 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.440371 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.440400 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.440422 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.543518 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.543607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.543628 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.543661 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.543685 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.603228 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:32 crc kubenswrapper[4636]: E1002 21:24:32.603447 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.646513 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.646606 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.646633 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.646668 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.646691 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.749894 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.749936 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.749945 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.749962 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.749973 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.852964 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.852998 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.853006 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.853023 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.853038 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.956532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.956589 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.956607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.956635 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:32 crc kubenswrapper[4636]: I1002 21:24:32.956654 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:32Z","lastTransitionTime":"2025-10-02T21:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.059397 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.059443 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.059462 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.059487 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.059502 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.162346 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.162396 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.162405 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.162426 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.162437 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.264593 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.264630 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.264639 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.264657 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.264667 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.367054 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.367088 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.367098 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.367114 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.367124 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.470292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.470339 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.470577 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.470608 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.470625 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.573594 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.573654 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.573673 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.573701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.573721 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.602992 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.603082 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.602992 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:33 crc kubenswrapper[4636]: E1002 21:24:33.603190 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:33 crc kubenswrapper[4636]: E1002 21:24:33.603296 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:33 crc kubenswrapper[4636]: E1002 21:24:33.603410 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.676700 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.676740 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.676783 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.676803 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.676814 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
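The six entries above show three pods that cannot get a sandbox because no CNI configuration file exists under /etc/kubernetes/cni/net.d/. On OpenShift that file is normally written by the cluster network operator (Multus/OVN-Kubernetes) once it starts; purely as an illustration of the kind of conflist the runtime is polling for, here is a sketch with a hypothetical file name and a placeholder bridge network that is not what OpenShift actually installs:

```go
// cniconf.go - illustration only: the directory path is taken verbatim from
// the log above, but the file name and bridge network are placeholders; on
// this cluster the network operator writes the real config.
package main

import (
	"log"
	"os"
)

const conflist = `{
  "cniVersion": "0.4.0",
  "name": "example-bridge",
  "plugins": [
    {
      "type": "bridge",
      "bridge": "cni0",
      "isGateway": true,
      "ipMasq": true,
      "ipam": {
        "type": "host-local",
        "subnet": "10.88.0.0/16"
      }
    }
  ]
}
`

func main() {
	// Once any valid conflist appears here, the runtime flips NetworkReady to true.
	if err := os.WriteFile("/etc/kubernetes/cni/net.d/99-example-bridge.conflist", []byte(conflist), 0o644); err != nil {
		log.Fatal(err)
	}
}
```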
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.779281 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.779323 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.779333 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.779350 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.779361 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.881844 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.881902 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.881914 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.881932 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.881973 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.985057 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.985104 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.985115 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.985136 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:33 crc kubenswrapper[4636]: I1002 21:24:33.985147 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:33Z","lastTransitionTime":"2025-10-02T21:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.092062 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.092174 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.092629 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.092657 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.092666 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.195326 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.195369 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.195380 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.195398 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.195409 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.297777 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.297825 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.297837 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.297854 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.297868 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.400438 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.400466 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.400473 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.400490 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.400498 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.503179 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.503213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.503224 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.503239 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.503252 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.602915 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:24:34 crc kubenswrapper[4636]: E1002 21:24:34.603084 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.604721 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.604741 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.604760 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.604773 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.604782 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.707446 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.707503 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.707519 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.707543 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.707560 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.821067 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.821095 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.821103 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.821116 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.821124 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.923540 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.923568 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.923576 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.923590 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:34 crc kubenswrapper[4636]: I1002 21:24:34.923598 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:34Z","lastTransitionTime":"2025-10-02T21:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.026409 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.026441 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.026452 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.026469 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.026478 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.129422 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.129478 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.129496 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.129521 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.129537 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.232124 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.232149 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.232158 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.232172 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.232181 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.335123 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.335186 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.335204 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.335228 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.335245 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.440512 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.440571 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.440588 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.440612 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.440631 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.544660 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.544701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.544711 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.544726 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.544737 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.602648 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.602680 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.602826 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:35 crc kubenswrapper[4636]: E1002 21:24:35.602877 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
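The "NetworkReady=false" text the kubelet keeps relaying in these entries comes from the container runtime's CRI Status response, which the kubelet polls. A sketch of issuing that same Status call directly against cri-o's socket, assuming the conventional /var/run/crio/crio.sock path on this host and the k8s.io/cri-api client package:

```go
// cristatus.go - a minimal sketch of the CRI Status call whose NetworkReady
// condition the kubelet is relaying above; the cri-o socket path is an
// assumption for this host.
package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := runtimeapi.NewRuntimeServiceClient(conn)
	resp, err := client.Status(context.TODO(), &runtimeapi.StatusRequest{})
	if err != nil {
		log.Fatal(err)
	}
	// One of these conditions is NetworkReady; it stays false until a CNI
	// config appears in the directory named in the log messages.
	for _, cond := range resp.GetStatus().GetConditions() {
		fmt.Printf("%s=%v reason=%s message=%q\n",
			cond.GetType(), cond.GetStatus(), cond.GetReason(), cond.GetMessage())
	}
}
```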
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:35 crc kubenswrapper[4636]: E1002 21:24:35.603066 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:35 crc kubenswrapper[4636]: E1002 21:24:35.603173 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.647726 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.647826 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.647851 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.647879 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.647902 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.750005 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.750088 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.750106 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.750130 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.750147 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.853335 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.853372 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.853380 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.853396 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.853408 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.955832 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.955869 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.955878 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.955893 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:35 crc kubenswrapper[4636]: I1002 21:24:35.955905 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:35Z","lastTransitionTime":"2025-10-02T21:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.057654 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.057692 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.057701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.057716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.057725 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.159824 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.159864 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.159874 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.159888 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.159897 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.262355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.262404 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.262418 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.262435 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.262464 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.365841 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.365917 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.365936 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.365962 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.365980 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.468770 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.468820 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.468836 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.468854 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.468872 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.575411 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.575464 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.575477 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.575507 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.575521 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.602769 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.602953 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
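The entry at 21:24:36.621399 just below records the kubelet failing to patch the node's status subresource; the $setElementOrder/conditions directive in the payload marks it as a strategic-merge patch. A minimal sketch of submitting the same kind of patch through the typed client, with a hypothetical kubeconfig path and an abbreviated condition payload:

```go
// patchnode.go - a minimal sketch of the strategic-merge status patch that
// kubelet_node_status.go:585 reports failing below; the kubeconfig path is a
// stand-in and the payload is trimmed to a single condition.
package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	patch := []byte(`{"status":{"conditions":[{"type":"Ready","status":"False","reason":"KubeletNotReady"}]}}`)
	// Passing "status" as the subresource targets the node's status, as the kubelet does.
	if _, err := cs.CoreV1().Nodes().Patch(context.TODO(), "crc",
		types.StrategicMergePatchType, patch, metav1.PatchOptions{}, "status"); err != nil {
		log.Fatal(err)
	}
}
```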
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.607567 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.607623 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.607633 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.607647 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.607659 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.621399 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:36Z is after 
2025-08-24T17:21:41Z" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.626551 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.626577 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.626586 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.626601 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.626610 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.640558 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.645610 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.645639 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.645648 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.645662 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.645673 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.658135 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.664484 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.664545 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.664558 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.664598 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.664614 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.677855 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.681913 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.681944 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.681955 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.681971 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.681982 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.692878 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:36Z is after 
2025-08-24T17:21:41Z" Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.692992 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.695150 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.695180 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.695189 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.695202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.695211 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.798429 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.798497 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.798516 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.798540 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.798557 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.880079 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.880250 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:36 crc kubenswrapper[4636]: E1002 21:24:36.880340 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:25:08.880322615 +0000 UTC m=+100.203330634 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.900966 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.901035 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.901047 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.901064 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:36 crc kubenswrapper[4636]: I1002 21:24:36.901079 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:36Z","lastTransitionTime":"2025-10-02T21:24:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.003836 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.003888 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.003901 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.003918 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.003935 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.106432 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.106482 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.106502 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.106521 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.106534 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.208768 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.208810 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.208820 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.208839 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.208849 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.311381 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.311418 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.311427 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.311444 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.311453 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.414700 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.414767 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.414778 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.414795 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.414807 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.518291 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.518341 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.518351 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.518370 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.518384 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.603360 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.603435 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:37 crc kubenswrapper[4636]: E1002 21:24:37.603489 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:37 crc kubenswrapper[4636]: E1002 21:24:37.603583 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.603451 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:37 crc kubenswrapper[4636]: E1002 21:24:37.603675 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.621209 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.621259 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.621268 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.621286 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.621297 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.724268 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.724298 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.724306 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.724319 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.724328 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.827825 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.827893 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.827908 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.827935 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.827955 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.932221 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.932285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.932299 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.932335 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:37 crc kubenswrapper[4636]: I1002 21:24:37.932349 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:37Z","lastTransitionTime":"2025-10-02T21:24:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.036028 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.036126 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.036156 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.036197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.036228 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.140192 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.140251 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.140263 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.140282 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.140296 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.243259 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.243295 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.243305 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.243321 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.243333 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.346290 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.346353 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.346373 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.346396 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.346440 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.448975 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.449013 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.449024 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.449037 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.449047 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.551763 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.551810 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.551818 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.551833 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.551845 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.603192 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:38 crc kubenswrapper[4636]: E1002 21:24:38.603404 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.654826 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.654889 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.654906 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.654933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.654949 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.757301 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.757349 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.757364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.757382 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.757394 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.860162 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.860213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.860225 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.860245 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.860257 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.963222 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.963271 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.963283 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.963302 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:38 crc kubenswrapper[4636]: I1002 21:24:38.963314 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:38Z","lastTransitionTime":"2025-10-02T21:24:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.065693 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.065780 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.065798 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.065823 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.065840 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.167625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.167670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.167682 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.167699 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.167710 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.270604 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.270672 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.270690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.270715 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.270733 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.373200 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.373254 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.373263 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.373286 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.373297 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.476015 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.476767 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.476890 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.477043 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.477183 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.580438 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.580829 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.580957 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.581050 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.581124 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.602928 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:39 crc kubenswrapper[4636]: E1002 21:24:39.603711 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.604503 4636 scope.go:117] "RemoveContainer" containerID="cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb" Oct 02 21:24:39 crc kubenswrapper[4636]: E1002 21:24:39.607591 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.611025 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.611141 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:39 crc kubenswrapper[4636]: E1002 21:24:39.611305 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:39 crc kubenswrapper[4636]: E1002 21:24:39.611415 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.624141 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.641510 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-contr
oller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.657227 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.674201 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.683611 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.684469 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 
21:24:39.684550 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.684565 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.684591 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.684630 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.696462 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-
cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.723419 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb80
64d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.735943 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.746309 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.761849 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.774332 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.786019 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.787415 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.787440 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.787493 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.787523 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.787795 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.800145 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.817450 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.830771 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.844597 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.860916 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:39Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.891016 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.891439 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.891505 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.891593 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.891712 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.995863 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.995904 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.995913 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.995934 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:39 crc kubenswrapper[4636]: I1002 21:24:39.995942 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:39Z","lastTransitionTime":"2025-10-02T21:24:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.098109 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.098151 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.098164 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.098179 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.098189 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.200508 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.200545 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.200556 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.200570 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.200579 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.304311 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.304343 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.304354 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.304370 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.304380 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.407287 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.407330 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.407339 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.407358 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.407369 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.510260 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.510318 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.510339 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.510364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.510383 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.603378 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:24:40 crc kubenswrapper[4636]: E1002 21:24:40.603623 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.613897 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.613941 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.613955 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.613975 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.613991 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.716403 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.716454 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.716467 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.716484 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.716501 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.819614 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.819671 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.819684 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.819716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.819728 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.927458 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.927496 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.927505 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.927518 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:40 crc kubenswrapper[4636]: I1002 21:24:40.927528 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:40Z","lastTransitionTime":"2025-10-02T21:24:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.030273 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.030373 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.030395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.030453 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.030473 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.132936 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.132970 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.132981 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.132995 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.133010 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.234844 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.234878 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.234888 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.234902 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.234915 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.336684 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.336709 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.336717 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.336729 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.336738 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.439006 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.439048 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.439060 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.439076 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.439087 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.541699 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.542066 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.542139 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.542206 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.542267 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.603288 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.603451 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.603980 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:41 crc kubenswrapper[4636]: E1002 21:24:41.604247 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:41 crc kubenswrapper[4636]: E1002 21:24:41.604495 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:24:41 crc kubenswrapper[4636]: E1002 21:24:41.604737 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.645328 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.645400 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.645425 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.645457 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.645482 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.748172 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.748553 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.748622 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.748711 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.748804 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.850725 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.850794 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.850804 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.850816 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.850826 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.953287 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.953343 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.953353 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.953370 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:41 crc kubenswrapper[4636]: I1002 21:24:41.953381 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:41Z","lastTransitionTime":"2025-10-02T21:24:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.055883 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.055926 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.055937 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.055953 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.055963 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.158175 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.158223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.158237 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.158255 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.158266 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.262860 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.263809 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.263933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.264047 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.264145 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.367441 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.367497 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.367512 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.367545 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.367556 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.470155 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.470201 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.470213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.470264 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.470287 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.573810 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.573864 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.573875 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.573897 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.573909 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.603160 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:24:42 crc kubenswrapper[4636]: E1002 21:24:42.603302 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.680374 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.680423 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.680433 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.680450 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.680462 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.784411 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.784506 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.784524 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.784562 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.784584 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.886896 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.886968 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.886997 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.887037 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.887047 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.990097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.990174 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.990194 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.990227 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:42 crc kubenswrapper[4636]: I1002 21:24:42.990246 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:42Z","lastTransitionTime":"2025-10-02T21:24:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.094361 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.094881 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.094917 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.094939 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.094957 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.126687 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/0.log"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.126796 4636 generic.go:334] "Generic (PLEG): container finished" podID="3a64b152-90d7-4dd0-be73-17e987476a1c" containerID="93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1" exitCode=1
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.126838 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerDied","Data":"93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1"}
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.127405 4636 scope.go:117] "RemoveContainer" containerID="93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.150418 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.163102 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.180912 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.199500 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.199953 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.199989 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.200002 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.200023 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.200039 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.211658 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.232886 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 
2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.251311 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.274484 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.288454 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.302349 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.303671 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.303704 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.303713 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.303730 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.303741 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.314847 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.329126 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.349421 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.360713 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.374292 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.396553 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.405588 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.405607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.405616 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.405633 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.405668 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.416798 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:43Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.509168 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.509215 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.509227 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.509245 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.509255 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.603734 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.603909 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.603908 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:43 crc kubenswrapper[4636]: E1002 21:24:43.604031 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:43 crc kubenswrapper[4636]: E1002 21:24:43.604218 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:43 crc kubenswrapper[4636]: E1002 21:24:43.604415 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.611178 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.611210 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.611223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.611240 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.611251 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.713323 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.713377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.713389 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.713401 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.713411 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.815550 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.815576 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.815584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.815597 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.815607 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.917527 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.917570 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.917581 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.917597 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:43 crc kubenswrapper[4636]: I1002 21:24:43.917609 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:43Z","lastTransitionTime":"2025-10-02T21:24:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.021483 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.021546 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.021558 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.021582 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.021594 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.124285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.124338 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.124349 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.124369 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.124385 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.134798 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/0.log" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.134871 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerStarted","Data":"b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.163256 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.184663 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.206704 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.227973 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.228066 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.228092 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.228119 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.228140 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.236123 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.259212 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b7964
3df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.283097 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.305915 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.324321 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.332455 4636 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.332544 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.332574 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.332608 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.332635 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.353949 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.388778 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.406941 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.426773 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.435420 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.435487 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.435508 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.435539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.435556 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.444009 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.466519 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.484182 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.504110 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.527914 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:44Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.540223 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.540266 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.540280 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.540309 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.540332 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.603517 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:44 crc kubenswrapper[4636]: E1002 21:24:44.603680 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.643813 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.643883 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.643897 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.643923 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.643943 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.747348 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.747409 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.747429 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.747456 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.747475 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.850856 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.850919 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.850948 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.850977 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.850996 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.962870 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.962990 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.963001 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.963018 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:44 crc kubenswrapper[4636]: I1002 21:24:44.963033 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:44Z","lastTransitionTime":"2025-10-02T21:24:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.065643 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.065731 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.065799 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.065836 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.065860 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.168377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.168443 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.168465 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.168500 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.168536 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.272019 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.272475 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.272644 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.272813 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.272992 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.376314 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.376371 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.376390 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.376422 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.376444 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.479377 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.479470 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.479488 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.479513 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.479532 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.583174 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.583230 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.583249 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.583277 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.583298 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.603586 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.603587 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:45 crc kubenswrapper[4636]: E1002 21:24:45.605035 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:45 crc kubenswrapper[4636]: E1002 21:24:45.605118 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.604217 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:45 crc kubenswrapper[4636]: E1002 21:24:45.605198 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.687708 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.687933 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.687998 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.688119 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.688215 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.792332 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.792502 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.792530 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.792568 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.792588 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.895884 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.895973 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.895996 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.896031 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:45 crc kubenswrapper[4636]: I1002 21:24:45.896055 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:45.999823 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:45.999914 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:45.999965 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:45.999990 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.000007 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:45Z","lastTransitionTime":"2025-10-02T21:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.103855 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.103920 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.103941 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.103968 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.103992 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.207079 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.207135 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.207153 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.207181 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.207202 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.311039 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.311112 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.311132 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.311164 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.311183 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.414824 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.414899 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.414925 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.414959 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.414983 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.518600 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.518644 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.518656 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.518676 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.518693 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.603464 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.603886 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.621565 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.621612 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.621630 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.621657 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.621677 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.726060 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.726142 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.726166 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.726202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.726227 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.802739 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.802845 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.802860 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.802885 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.802902 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.824302 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:46Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.830848 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.830921 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.830942 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.830971 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.831003 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.857225 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:46Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.863669 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.863716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.863729 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.863785 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.863802 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.883008 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:46Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.888495 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.888572 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.888599 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.888635 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.888662 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.905473 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:46Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.912906 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.912956 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.912966 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.912988 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.912999 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.944097 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:46Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:46 crc kubenswrapper[4636]: E1002 21:24:46.944246 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.946578 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.946657 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.946671 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.946695 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:46 crc kubenswrapper[4636]: I1002 21:24:46.946710 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:46Z","lastTransitionTime":"2025-10-02T21:24:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.050183 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.050222 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.050234 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.050255 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.050268 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.153399 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.153474 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.153498 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.153538 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.153564 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.257593 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.257671 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.257696 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.257732 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.257793 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.361440 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.361517 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.361539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.361571 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.361593 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.465139 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.465221 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.465242 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.465274 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.465298 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.568518 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.568583 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.568595 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.568613 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.568643 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.603008 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.603098 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.603135 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:47 crc kubenswrapper[4636]: E1002 21:24:47.603278 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:47 crc kubenswrapper[4636]: E1002 21:24:47.603459 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:47 crc kubenswrapper[4636]: E1002 21:24:47.603529 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.671728 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.671834 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.671854 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.671885 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.671909 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.775915 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.775968 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.775984 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.776009 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.776026 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.879619 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.879681 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.879700 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.879727 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.879775 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.983540 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.983620 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.983650 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.983683 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:47 crc kubenswrapper[4636]: I1002 21:24:47.983710 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:47Z","lastTransitionTime":"2025-10-02T21:24:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.086701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.086783 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.086794 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.086816 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.086829 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:48Z","lastTransitionTime":"2025-10-02T21:24:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.190946 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.191009 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.191026 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.191055 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.191074 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:48Z","lastTransitionTime":"2025-10-02T21:24:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.295572 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.295625 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.295635 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.295654 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.295666 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:48Z","lastTransitionTime":"2025-10-02T21:24:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.399846 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.399912 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.399929 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.399960 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.399979 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:48Z","lastTransitionTime":"2025-10-02T21:24:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.503384 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.503449 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.503471 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.503503 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.503525 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:48Z","lastTransitionTime":"2025-10-02T21:24:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.603359 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:48 crc kubenswrapper[4636]: E1002 21:24:48.603798 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.606381 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.606428 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.606446 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.606470 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:48 crc kubenswrapper[4636]: I1002 21:24:48.606490 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:48Z","lastTransitionTime":"2025-10-02T21:24:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.541213 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.541618 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.541859 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.542026 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.542191 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:49Z","lastTransitionTime":"2025-10-02T21:24:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.603393 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.603469 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.603659 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:24:49 crc kubenswrapper[4636]: E1002 21:24:49.604462 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 21:24:49 crc kubenswrapper[4636]: E1002 21:24:49.604493 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:24:49 crc kubenswrapper[4636]: E1002 21:24:49.605075 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
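Every sync failure above reduces to the same condition: nothing has yet written a network config into /etc/kubernetes/cni/net.d/, so the runtime keeps reporting NetworkReady=false (further down, multus times out the same way waiting for its copy of the ovn-kubernetes config). A self-contained sketch of that wait loop; the one-second interval, the 45-second budget, and the accepted file extensions are assumptions modeled on the messages in this log, not extracted kubelet code:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
        "time"
    )

    // hasCNIConfig reports whether any plausible CNI config file exists in
    // dir, roughly the condition behind "no CNI configuration file in ...".
    func hasCNIConfig(dir string) bool {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false // directory missing or unreadable counts as not ready
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // assumed extension set
                return true
            }
        }
        return false
    }

    func main() {
        const dir = "/etc/kubernetes/cni/net.d/" // directory from the log above
        // ~45s mirrors how long the multus readiness check below waited.
        deadline := time.Now().Add(45 * time.Second)
        for time.Now().Before(deadline) {
            if hasCNIConfig(dir) {
                fmt.Println("CNI configuration present; network plugin should report ready")
                return
            }
            time.Sleep(time.Second)
        }
        fmt.Println("timed out waiting for a CNI configuration file")
    }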
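The status_manager failures that follow share one root cause: each status patch goes through the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, and that endpoint serves a certificate whose NotAfter of 2025-08-24T17:21:41Z is weeks behind the node's clock, so TLS verification fails and every patch is rejected. A diagnostic sketch that repeats the kubelet's comparison against the live endpoint (standard library only; illustration, not part of the cluster's tooling):

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Address taken from the failing webhook URL in this log.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // fetch the cert even though it is expired
        })
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        state := conn.ConnectionState()
        if len(state.PeerCertificates) == 0 {
            fmt.Println("no peer certificate presented")
            return
        }
        cert := state.PeerCertificates[0]
        now := time.Now().UTC()
        fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n",
            cert.NotBefore.Format(time.RFC3339),
            cert.NotAfter.Format(time.RFC3339),
            now.Format(time.RFC3339))
        if now.After(cert.NotAfter) {
            // Matches the kubelet error: "current time ... is after <NotAfter>".
            fmt.Println("certificate has expired; status patches will keep failing until it is rotated")
        }
    }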
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.619357 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.635465 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-contr
oller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.646149 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.646214 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.646227 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.646294 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.646306 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:49Z","lastTransitionTime":"2025-10-02T21:24:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.650849 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.666392 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.681829 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.702584 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.738623 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] [default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.749260 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.749306 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.749320 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.749348 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.749362 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:49Z","lastTransitionTime":"2025-10-02T21:24:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.755030 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.768560 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.794609 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.817813 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.833808 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.851495 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.851789 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.851886 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.851974 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.852073 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:49Z","lastTransitionTime":"2025-10-02T21:24:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.877903 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.899396 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.915551 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.931082 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.954808 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708
c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:49Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.955110 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.955182 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.955206 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.955238 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:49 crc kubenswrapper[4636]: I1002 21:24:49.955257 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:49Z","lastTransitionTime":"2025-10-02T21:24:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.058606 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.058681 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.058700 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.058729 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.058774 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.161772 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.161829 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.161850 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.161877 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.161893 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.264676 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.264773 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.264792 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.264836 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.264851 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.368146 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.368218 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.368241 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.368271 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.368296 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.472038 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.472305 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.472331 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.472378 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.472406 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.575864 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.575987 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.576186 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.576204 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.576219 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.603023 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:50 crc kubenswrapper[4636]: E1002 21:24:50.603157 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.679730 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.679788 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.679798 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.679812 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.679824 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.783492 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.783555 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.783572 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.783596 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.783613 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.887383 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.887469 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.887510 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.887543 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.887568 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.991335 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.991441 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.991466 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.991509 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:50 crc kubenswrapper[4636]: I1002 21:24:50.991531 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:50Z","lastTransitionTime":"2025-10-02T21:24:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.096246 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.096334 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.096363 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.096396 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.096419 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.199641 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.199701 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.199719 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.199779 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.199821 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.303876 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.304141 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.304167 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.304197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.304219 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.408161 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.408224 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.408241 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.408266 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.408279 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.511354 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.511429 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.511453 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.511486 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.511509 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.603136 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.603236 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.603162 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:51 crc kubenswrapper[4636]: E1002 21:24:51.603617 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.603937 4636 scope.go:117] "RemoveContainer" containerID="cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb" Oct 02 21:24:51 crc kubenswrapper[4636]: E1002 21:24:51.604003 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:51 crc kubenswrapper[4636]: E1002 21:24:51.603873 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.621461 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.621880 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.622059 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.622265 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.622422 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.725189 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.725232 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.725245 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.725266 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.725281 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.827587 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.827642 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.827659 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.827690 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.827708 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.930702 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.930788 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.930810 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.930839 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:51 crc kubenswrapper[4636]: I1002 21:24:51.930863 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:51Z","lastTransitionTime":"2025-10-02T21:24:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.034589 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.035067 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.035089 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.035118 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.035137 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.137310 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.137345 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.137354 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.137367 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.137376 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.167869 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/2.log" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.171088 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.171774 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.203827 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.223924 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.239644 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.239704 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.239716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.239747 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.239772 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.245956 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.267133 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] 
[default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.276663 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.286246 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 
21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.297661 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.309537 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.321615 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.342227 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.342274 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.342285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.342303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.342315 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.343619 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.358916 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.373082 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.387467 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.402249 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.416802 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708
c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.428940 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.445303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.445357 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.445371 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.445393 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.445411 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.446736 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:52Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.548292 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.548336 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.548346 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.548365 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.548380 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.602902 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:52 crc kubenswrapper[4636]: E1002 21:24:52.603072 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.619476 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.652313 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.652364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.652378 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.652401 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.652417 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.755869 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.755918 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.755932 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.755953 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.755970 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.858998 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.859040 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.859054 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.859072 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.859085 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.962670 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.962734 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.962770 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.962793 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:52 crc kubenswrapper[4636]: I1002 21:24:52.962805 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:52Z","lastTransitionTime":"2025-10-02T21:24:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.066241 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.066303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.066316 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.066341 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.066354 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.169250 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.169324 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.169342 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.169371 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.169390 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.178489 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/3.log" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.179666 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/2.log" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.185433 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" exitCode=1 Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.185597 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.185706 4636 scope.go:117] "RemoveContainer" containerID="cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.187131 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.187438 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.210081 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.227550 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.243965 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.266409 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.274297 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.274375 4636 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.274395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.274424 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.274446 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.290357 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.309024 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.328484 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.346990 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.364845 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.377678 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.377720 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.377734 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.377774 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.377790 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.388513 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbfa73e401706ca09496daee3557cc9c2123cb8064d7af4cba7249fa7ea22adb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:25Z\\\",\\\"message\\\":\\\"er: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:25Z is after 2025-08-24T17:21:41Z]\\\\nI1002 21:24:25.634353 6273 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1002 21:24:25.634367 6273 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634373 6273 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1002 21:24:25.634378 6273 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-9qm8w\\\\nI1002 21:24:25.634383 6273 base_network_controller_pods.go:477] 
[default/openshift-multus/network-metri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:52Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI1002 21:24:52.620430 6641 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI1002 21:24:52.620448 6641 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF1002 21:24:52.620453 6641 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.401836 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.415630 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.428160 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\
\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.445252 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.462242 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.476145 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.480706 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.480739 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.480773 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.480796 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.480807 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.494171 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"426dec84-3600-43b6-9e99-893156107fff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e5aecba03015137e66b951dc3c3a23f42866c1c501211aca641fd30d28e594d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52e
e2e66fab59f1cd009b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.507503 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:53Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.583202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.583245 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.583257 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.583275 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.583287 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.603294 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.603650 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.603307 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.603939 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.603296 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.604117 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.678528 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.679068 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:25:57.678992274 +0000 UTC m=+149.002000343 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.689151 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.689242 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.689269 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.689309 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.689336 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.780704 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.780890 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.780919 4636 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.780977 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781036 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-02 21:25:57.781005293 +0000 UTC m=+149.104013352 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.781073 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781137 4636 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781189 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781213 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781236 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781238 4636 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781263 4636 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781266 4636 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781272 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 21:25:57.781248969 +0000 UTC m=+149.104256988 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781381 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 21:25:57.781337732 +0000 UTC m=+149.104345761 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:53 crc kubenswrapper[4636]: E1002 21:24:53.781409 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 21:25:57.781398593 +0000 UTC m=+149.104406842 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.792202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.792359 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.792467 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.792542 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.792605 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.896295 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.896338 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.896348 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.896364 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.896375 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.999373 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.999417 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.999430 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.999445 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:53 crc kubenswrapper[4636]: I1002 21:24:53.999456 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:53Z","lastTransitionTime":"2025-10-02T21:24:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.102704 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.102789 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.102800 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.102827 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.102845 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.191081 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/3.log" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.194849 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:24:54 crc kubenswrapper[4636]: E1002 21:24:54.194993 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.204535 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.204567 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.204579 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.204595 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.204607 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.211360 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.226427 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b7964
3df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.239879 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.253443 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.267102 4636 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.277069 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.294487 4636 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:52Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI1002 21:24:52.620430 6641 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI1002 21:24:52.620448 6641 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF1002 21:24:52.620453 6641 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.304535 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.306072 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.306217 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.306310 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.306412 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.306515 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.315696 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.326137 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.336945 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.347322 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.356776 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.367965 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.376678 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"426dec84-3600-43b6-9e99-893156107fff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e5aecba03015137e66b951dc3c3a23f42866c1c501211aca641fd30d28e594d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.384986 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.397742 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.408314 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.408342 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.408350 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.408363 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.408371 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.410425 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:54Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.511973 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.512034 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.512052 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.512079 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.512100 4636 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.603591 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:54 crc kubenswrapper[4636]: E1002 21:24:54.603797 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.614566 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.614614 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.614633 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.614654 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.614672 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.717405 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.717445 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.717463 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.717484 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.717502 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.821215 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.821667 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.821859 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.822010 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.822181 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.924875 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.925253 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.925418 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.925575 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:54 crc kubenswrapper[4636]: I1002 21:24:54.925712 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:54Z","lastTransitionTime":"2025-10-02T21:24:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.030088 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.030166 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.030188 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.030226 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.030251 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.133538 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.133612 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.133631 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.133665 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.133686 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.236567 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.236647 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.236666 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.236692 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.236713 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.339811 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.339878 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.339904 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.339934 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.339984 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.445064 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.445607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.445848 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.446023 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.446179 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.549966 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.550289 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.550381 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.550476 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.550570 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.603349 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.603349 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.603613 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:55 crc kubenswrapper[4636]: E1002 21:24:55.603946 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:55 crc kubenswrapper[4636]: E1002 21:24:55.604033 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:55 crc kubenswrapper[4636]: E1002 21:24:55.604125 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.656812 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.657315 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.657519 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.658038 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.658291 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.762817 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.762886 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.762904 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.762932 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.762951 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.866500 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.866560 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.866583 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.866614 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.866659 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.969791 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.970192 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.970355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.970502 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:55 crc kubenswrapper[4636]: I1002 21:24:55.970645 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:55Z","lastTransitionTime":"2025-10-02T21:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.074689 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.074785 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.074809 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.074838 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.074859 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.177683 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.177746 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.177805 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.177832 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.177850 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.279982 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.280440 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.280525 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.280607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.280681 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.383505 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.383538 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.383546 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.383560 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.383569 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.485905 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.485996 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.486013 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.486039 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.486056 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.588639 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.589005 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.589124 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.589260 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.589383 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.603011 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:56 crc kubenswrapper[4636]: E1002 21:24:56.603136 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.692240 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.692291 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.692303 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.692323 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.692336 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.795262 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.795318 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.795330 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.795347 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.795358 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.899139 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.899457 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.899583 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.899824 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.900382 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.973435 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.973476 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.973492 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.973513 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.973528 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:56 crc kubenswrapper[4636]: E1002 21:24:56.991664 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cf7a8497-d011-4aa1-ae8c-a105b6bba068\\\",\\\"systemUUID\\\":\\\"3ae1382e-dfe6-49ba-a6ed-d50a2758b26a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:56Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.997644 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.997888 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.998055 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.998246 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:56 crc kubenswrapper[4636]: I1002 21:24:56.998393 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:56Z","lastTransitionTime":"2025-10-02T21:24:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.016573 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload identical to the 21:24:56 attempt above; condition timestamps now 2025-10-02T21:24:57Z, image list and nodeInfo unchanged] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:57Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.021714 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.021945 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.022091 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.022228 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.022360 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.043063 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [same status patch payload as above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:57Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.048495 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.048532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.048549 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.048571 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.048588 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.069619 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [same status patch payload as above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:57Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.074028 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.074202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.074355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.074493 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.074663 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.096098 4636 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [same status patch payload as above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:57Z is after 2025-08-24T17:21:41Z"
Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.096784 4636 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.099114 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
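[Annotation: every patch attempt above fails identically. The kubelet's node-status PATCH is intercepted by the validating webhook node.network-node-identity.openshift.io at https://127.0.0.1:9743/node, whose serving certificate expired 2025-08-24T17:21:41Z while the host clock reads 2025-10-02, so Go's TLS verification rejects the handshake; after five attempts the kubelet gives up with "update node status exceeds retry count". A minimal diagnostic sketch in Go, not part of the kubelet, with the endpoint copied from the log, that fetches the webhook's certificate despite the expiry and prints its validity window:

// certcheck.go: connect to the webhook endpoint named in the log above and
// print the validity window of its serving certificate, confirming the
// x509 "certificate has expired" failure.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint copied from the failing Post in the log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip verification so the handshake succeeds even though the
		// certificate is expired; we only want to inspect it.
		InsecureSkipVerify: true,
	})
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		log.Fatal("no peer certificate presented")
	}
	leaf := state.PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("subject:   %s\n", leaf.Subject)
	fmt.Printf("notBefore: %s\n", leaf.NotBefore.UTC().Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", leaf.NotAfter.UTC().Format(time.RFC3339))
	fmt.Printf("expired as of %s: %v\n", now.Format(time.RFC3339), now.After(leaf.NotAfter))
}

Against the state captured in this log it would report a notAfter of 2025-08-24T17:21:41Z, matching the error string.]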
event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.099158 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.099175 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.099197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.099215 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.201872 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.201917 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.201930 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.201947 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.201961 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.304931 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.304962 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.304969 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.304982 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.304990 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.407679 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.408137 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.408341 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.408523 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.408695 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.512207 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.512265 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.512281 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.512305 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.512323 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.603234 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.603239 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.603326 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.603445 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.603589 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:57 crc kubenswrapper[4636]: E1002 21:24:57.603679 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.614097 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.614298 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.614397 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.614493 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.614595 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.717884 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.717914 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.717924 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.717940 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.717951 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.819598 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.819646 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.819656 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.819671 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.819697 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.922501 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.922534 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.922568 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.922584 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:57 crc kubenswrapper[4636]: I1002 21:24:57.922612 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:57Z","lastTransitionTime":"2025-10-02T21:24:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.024723 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.025052 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.025074 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.025098 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.025114 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.127425 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.127511 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.127533 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.127561 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.127583 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.229294 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.229383 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.229402 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.229424 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.229440 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.331677 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.331703 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.331711 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.331723 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.331895 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.433793 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.433833 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.433847 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.433866 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.433882 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.536436 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.536471 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.536479 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.536493 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.536503 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.602605 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:24:58 crc kubenswrapper[4636]: E1002 21:24:58.602745 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.639467 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.639521 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.639536 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.639557 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.639571 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.741881 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.741924 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.741936 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.741955 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.741968 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.844793 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.844833 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.844847 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.844866 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.844879 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.948012 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.948069 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.948089 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.948116 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:58 crc kubenswrapper[4636]: I1002 21:24:58.948139 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:58Z","lastTransitionTime":"2025-10-02T21:24:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.051720 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.051820 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.051839 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.051863 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.051880 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.155048 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.155117 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.155136 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.155160 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.155177 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.257623 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.257680 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.257700 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.257724 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.257740 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.361207 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.361309 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.361326 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.361350 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.361366 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.465237 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.465312 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.465331 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.465357 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.465377 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.568286 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.568337 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.568539 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.568564 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.568581 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.602824 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.602918 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.603153 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:24:59 crc kubenswrapper[4636]: E1002 21:24:59.604006 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:24:59 crc kubenswrapper[4636]: E1002 21:24:59.604437 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:24:59 crc kubenswrapper[4636]: E1002 21:24:59.604416 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.624973 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.632829 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zssg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e169ed7-2c2e-4623-9f21-330753911ab5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ckrp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zssg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.650376 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"426dec84-3600-43b6-9e99-893156107fff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e5aecba03015137e66b951dc3c3a23f42866c1c501211aca641fd30d28e594d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://568beaa08ecec10704dce18c1dd4fa9a26aaf3b763b52ee2e66fab59f1cd009b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.673021 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.673107 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.673127 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.673152 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.673207 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.675575 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21d5632c821442f8d44ea14ae5e8b1fc048ca047a9489c05767849f7cb728966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.699490 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1fdb1cf-4a3b-461e-bd98-00bd4ac75353\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1543010520fc4ef8d3a5c7dbfd93447f00c6afef6a987633f66133df06d57db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81ba21d8817cb3d5332debe2a480cc67e3df23e2871e558305b0140f7c1091fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c95570d02944b15157239a7296675df65863fd302cbc66b81f9dff85372db37f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://563fe2adb8d8787e4b5fa72660f38b0d353b01ed2a9e2792e8e43d4cc11add85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b764fa522ea6a71500eeae616ebe8f1846d9dd59131bde86cfbdc6b3b85d915b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"message\\\":\\\"S_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069877 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 21:23:50.069882 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 21:23:50.069885 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 21:23:50.069889 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 21:23:50.069891 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1002 21:23:50.070040 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1002 21:23:50.086673 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1863760419/tls.crt::/tmp/serving-cert-1863760419/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1759440224\\\\\\\\\\\\\\\" (2025-10-02 21:23:43 +0000 UTC to 2025-11-01 21:23:44 +0000 UTC (now=2025-10-02 21:23:50.086636637 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086820 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759440230\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759440229\\\\\\\\\\\\\\\" (2025-10-02 20:23:49 +0000 UTC to 2026-10-02 20:23:49 +0000 UTC (now=2025-10-02 21:23:50.086796721 +0000 UTC))\\\\\\\"\\\\nI1002 21:23:50.086838 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1002 21:23:50.086868 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1002 21:23:50.086891 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1002 21:23:50.086924 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nF1002 21:23:50.086949 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ce7c0faf0559b23d37172e0d3bde7c3ed5d19c44bed5ca9c86400ca50649a2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://350b5f52c7e4946ebe215368d033d902342c9d59f3be6b04e44ecfd2cddfab87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.721784 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.750990 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2136b2eebc9ebfe731ff7bbc751d7cd21c85cdfebfc10ca465b2bf638e598081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.772948 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.776840 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.776882 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.777100 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.777178 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.777221 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.789799 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a136ab0-a86b-4cf4-a332-8c569e1ca777\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e3e32936164ad36cd2508a8b60fad55928f0e525c187d01cf30d3b3035a34ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rs22p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2l2mr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.811596 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c1f289f-fb2b-4fd8-a6a3-2c573fff134a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd9c2edc2017b9df60b3ac67ee0885331714a92833e9956b23f9fa32cab8f6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d96a58508bb2143295174b6292726bdbda4f297ff8299fb96b3caacae660ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71b45624c8946b6bb7f9d624077f1871541cf2bbb5293b0cf25099853427df32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6180523b0a2cf2d69d7247dd5dfcbb9608087e484e9379c21e0a517052082b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e79da32fb8ffdb2df7e4455d212142cb87add611da97ec332ec683e79716a87d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:57Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c3e920a9867c3e9d936f59a6ff68ffa94cf535b92b363c2203ae00d797e2b7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09e1687b5d76fa8e6d74b33b0ce774c80a893375c56ac3da259b36185bc8800c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:24:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxkc6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9qm8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 
2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.830305 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e837be-0353-4081-9fdb-8c7318adfa30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b478c63096a0fac68cfc37d25048e277469204912ac7fb3c888ec8ce8c202f93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://755e4e29b3b11c83a95b500e00da22967cce7f5cf3c4ab5f0ed75fd79832a0d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a97f3983285fbd4b3341052b8e8906c3734877be23a5c531c63690faa4bac9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58891694266d811a9a99d1fde266b9cbbf5bed000487b79643df7704aea778a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.842332 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.860373 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb1657ed2560b9e6be849271bdf88777d5d889c3acf2cbde75d625e720dbbb17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5b9259634cccd2352dab899df3c61cbda7b5bbc26815067d45e303807ddb62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.872609 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-586cm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520aa252-e0e0-47e8-bb4c-55579fcfd286\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b1542b9ae312bdb49bab44d12ca8b0c557d75bf7c9311e10389f72cdc5b86cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmxxf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-586cm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.884268 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 
21:24:59.884298 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.884307 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.884324 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.884335 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.889211 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-895mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a64b152-90d7-4dd0-be73-17e987476a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:42Z\\\",\\\"message\\\":\\\"2025-10-02T21:23:56+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021\\\\n2025-10-02T21:23:56+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be3f3994-69b6-4f71-96b1-7ccbc3c0d021 to /host/opt/cni/bin/\\\\n2025-10-02T21:23:57Z [verbose] multus-daemon started\\\\n2025-10-02T21:23:57Z [verbose] Readiness Indicator file check\\\\n2025-10-02T21:24:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhh2l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-895mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.918838 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T21:24:52Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI1002 21:24:52.620430 6641 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI1002 21:24:52.620448 6641 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF1002 21:24:52.620453 6641 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T21:24:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shwzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7qm8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.930651 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbf92ff8-8b9e-4235-bf6f-8687e1812deb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af589e4613c1e766615bc871734db2dd9ed5007e887045bae8bb3a12d74a5470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frpcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.947897 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a8da18ba8a8418ca2ad9f081d29781bcc583e07a30e90d2a9e45465f93f134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b37de273b30f62123c6e70ff12c632d938e17e6c4b587048714bf7be422b5262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfrf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-kkf52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.967862 4636 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4d1bed9-50aa-4379-9edb-e8c92e96a44c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:24:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T21:23:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0f912f7538eec2c1d1e76fac5fc40bca0ee4c5bf04f568e30e24544b2c92351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da2f06ff7b3d7fa608777f45d97180ae2e2ba782163bdafff5882a97d6e0a39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name
\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fbe36e300ad2219b3ebc79aca722199d031d47bc8ca3a2e856c24e72b4a3231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T21:23:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f356fc880eaa9ee7b1e78174a5c0b141f95c4aeb97e6e17f00167d2c304f65d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T21:23:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T21:23:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T21:23:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T21:24:59Z is after 2025-08-24T17:21:41Z" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.987072 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.987200 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.987228 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.987259 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:24:59 crc kubenswrapper[4636]: I1002 21:24:59.987281 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:24:59Z","lastTransitionTime":"2025-10-02T21:24:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
[the node-status cycle above (four "Recording event message for node" entries: NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, followed by the "Node became not ready" Ready=False condition) repeats at roughly 100ms intervals; cycles at 21:25:00.089, .192, .296, .398 and .501 elided]
Oct 02 21:25:00 crc kubenswrapper[4636]: I1002 21:25:00.602622 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:25:00 crc kubenswrapper[4636]: E1002 21:25:00.603126 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
[heartbeat cycles at 21:25:00.604 and .707 elided]
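Note: the "failed calling webhook ... x509: certificate has expired or is not yet valid" errors earlier in this log are ordinary NotBefore/NotAfter validation failures against the network-node-identity webhook serving certificate. As an illustrative aid only (this is not kubelet code), the following stand-alone Go sketch runs the same validity check against a PEM file; the certificate path is a placeholder, not a path taken from this system.

// certcheck.go - minimal sketch: load a PEM certificate and report the
// NotBefore/NotAfter check that produces "x509: certificate has expired
// or is not yet valid". The path below is hypothetical.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil || block.Type != "CERTIFICATE" {
		log.Fatal("no CERTIFICATE block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n",
		cert.NotBefore.UTC().Format(time.RFC3339),
		cert.NotAfter.UTC().Format(time.RFC3339),
		now.Format(time.RFC3339))
	switch {
	case now.After(cert.NotAfter):
		fmt.Println("certificate has expired (current time is after NotAfter)")
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	default:
		fmt.Println("certificate is within its validity window")
	}
}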
[heartbeat cycles at 21:25:00.809, .912 and 21:25:01.015, .119, .221, .324, .428, .531 elided]
Oct 02 21:25:01 crc kubenswrapper[4636]: I1002 21:25:01.603528 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:25:01 crc kubenswrapper[4636]: E1002 21:25:01.604131 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 21:25:01 crc kubenswrapper[4636]: I1002 21:25:01.603941 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:25:01 crc kubenswrapper[4636]: I1002 21:25:01.604241 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
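Note: the recurring "no CNI configuration file in /etc/kubernetes/cni/net.d/" message means the container runtime found nothing it could load as a network configuration. Real runtimes do this through libcni (github.com/containernetworking/cni); the stdlib-only sketch below only approximates that check by scanning for the file extensions libcni accepts, and is illustrative rather than the actual runtime code.

// cnicheck.go - illustrative sketch of the check behind
// "no CNI configuration file in /etc/kubernetes/cni/net.d/".
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read conf dir:", err)
		return
	}
	var found []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni loads
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configurations:", found)
}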
Oct 02 21:25:01 crc kubenswrapper[4636]: E1002 21:25:01.604520 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 21:25:01 crc kubenswrapper[4636]: E1002 21:25:01.604955 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[heartbeat cycles at 21:25:01.637, .740, .843, .946 and 21:25:02.050, .154, .258, .361 elided]
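Note: the long backslash-escaped payloads in the "Failed to update status for pod" entries earlier are Go-quoted JSON strategic-merge patches; the "$setElementOrder/conditions" directive inside them preserves the ordering of the conditions list. To read one, unquote it and pretty-print the JSON. The sketch below does exactly that on a short, hypothetical fragment of such a patch, not the full payload from this log.

// unescape.go - unquote a Go-quoted patch string and pretty-print it.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"strconv"
)

func main() {
	// Hypothetical fragment in the same quoting style as the log.
	quoted := `"{\"metadata\":{\"uid\":\"5e87285e-b0c2-4f4a-87b8-9244f8a6daaa\"},\"status\":{\"phase\":\"Running\"}}"`
	raw, err := strconv.Unquote(quoted)
	if err != nil {
		log.Fatal(err)
	}
	var pretty bytes.Buffer
	if err := json.Indent(&pretty, []byte(raw), "", "  "); err != nil {
		log.Fatal(err)
	}
	fmt.Println(pretty.String())
}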
[heartbeat cycles at 21:25:02.464 and .566 elided]
Oct 02 21:25:02 crc kubenswrapper[4636]: I1002 21:25:02.603628 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:25:02 crc kubenswrapper[4636]: E1002 21:25:02.603723 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5"
[heartbeat cycles at 21:25:02.669 and .771 elided]
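Note: the "Node became not ready" entries show the kubelet writing Ready=False with reason KubeletNotReady into the node's status conditions. A client-go sketch that reads that condition back is below; the kubeconfig path is a placeholder, and the node name "crc" is taken from this log.

// nodeready.go - read the node's Ready condition with client-go.
package main

import (
	"context"
	"fmt"
	"log"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range node.Status.Conditions {
		if c.Type == corev1.NodeReady {
			fmt.Printf("Ready=%s reason=%s message=%q\n", c.Status, c.Reason, c.Message)
		}
	}
}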
[heartbeat cycles at 21:25:02.873, .976 and 21:25:03.079, .182, .285, .388, .491, .594 elided]
Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.602981 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.603019 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.602977 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:25:03 crc kubenswrapper[4636]: E1002 21:25:03.603102 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
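Note: the elided cycles recur at roughly 100ms intervals in this capture. A throwaway stdlib-only sketch that pulls the klog timestamps of the "Node became not ready" entries out of a log like this one, to confirm the cadence; the log path is a placeholder.

// cadence.go - print timestamps of "Node became not ready" entries.
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"regexp"
)

func main() {
	f, err := os.Open("/path/to/kubelet.log") // placeholder
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// klog header looks like: I1002 21:25:00.089776 ...
	re := regexp.MustCompile(`I(\d{4} \d{2}:\d{2}:\d{2}\.\d+).*Node became not ready`)
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 16*1024*1024) // lines here are very long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Println(m[1])
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatal(err)
	}
}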
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:03 crc kubenswrapper[4636]: E1002 21:25:03.603192 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:03 crc kubenswrapper[4636]: E1002 21:25:03.603261 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.697417 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.697455 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.697463 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.697476 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.697486 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:03Z","lastTransitionTime":"2025-10-02T21:25:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.800188 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.800532 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.800680 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.800862 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.801006 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:03Z","lastTransitionTime":"2025-10-02T21:25:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.903237 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.903285 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.903295 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.903310 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:03 crc kubenswrapper[4636]: I1002 21:25:03.903320 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:03Z","lastTransitionTime":"2025-10-02T21:25:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.006287 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.006361 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.006378 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.006402 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.006420 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.108572 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.108624 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.108635 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.108649 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.108659 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.211032 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.211074 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.211083 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.211095 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.211104 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.313977 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.314039 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.314049 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.314063 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.314072 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.416650 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.416705 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.416722 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.416745 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.416802 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.520034 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.520089 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.520105 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.520129 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.520152 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.602876 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:04 crc kubenswrapper[4636]: E1002 21:25:04.603067 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.623609 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.623674 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.623695 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.623727 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.624325 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.726506 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.726578 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.726592 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.726607 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.726617 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.829618 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.829676 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.829698 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.829728 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.829786 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.932906 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.932962 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.932980 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.933005 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:04 crc kubenswrapper[4636]: I1002 21:25:04.933024 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:04Z","lastTransitionTime":"2025-10-02T21:25:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.035846 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.036197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.036373 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.036556 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.036735 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.139589 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.139653 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.139675 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.139702 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.139721 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.242462 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.242517 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.242536 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.242557 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.242571 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.344684 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.345061 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.345214 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.345356 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.345489 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.448209 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.449081 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.449222 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.449396 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.449526 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.551826 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.551861 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.551871 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.551886 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.551896 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.603498 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.603592 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:05 crc kubenswrapper[4636]: E1002 21:25:05.603642 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.603598 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:05 crc kubenswrapper[4636]: E1002 21:25:05.603791 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:05 crc kubenswrapper[4636]: E1002 21:25:05.603833 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.653395 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.653437 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.653447 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.653461 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.653471 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.756459 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.756499 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.756513 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.756528 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.756538 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.859300 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.859343 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.859352 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.859401 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.859411 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.962138 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.962202 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.962218 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.962242 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:05 crc kubenswrapper[4636]: I1002 21:25:05.962260 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:05Z","lastTransitionTime":"2025-10-02T21:25:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.064676 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.065085 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.065239 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.065392 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.065526 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.168197 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.168253 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.168275 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.168306 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.168325 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.270382 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.270430 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.270446 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.270470 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.270487 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.373412 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.373456 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.373472 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.373493 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.373509 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.476218 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.476293 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.476332 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.476361 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.476380 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.578961 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.579021 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.579038 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.579059 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.579075 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.603653 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:06 crc kubenswrapper[4636]: E1002 21:25:06.604087 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.604300 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:25:06 crc kubenswrapper[4636]: E1002 21:25:06.604463 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.682294 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.682355 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.682379 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.682722 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.682970 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.786296 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.786345 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.786363 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.786385 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.786403 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.889677 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.889707 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.889716 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.889728 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.889736 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.992121 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.992208 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.992217 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.992229 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:06 crc kubenswrapper[4636]: I1002 21:25:06.992238 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:06Z","lastTransitionTime":"2025-10-02T21:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.093967 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.094006 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.094019 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.094036 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.094047 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:07Z","lastTransitionTime":"2025-10-02T21:25:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.197320 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.197388 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.197407 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.197436 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.197458 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:07Z","lastTransitionTime":"2025-10-02T21:25:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.271813 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.271862 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.271874 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.271892 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.271908 4636 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T21:25:07Z","lastTransitionTime":"2025-10-02T21:25:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.328385 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv"] Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.328882 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.332226 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.332636 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.333266 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.334390 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.361720 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.361684195 podStartE2EDuration="15.361684195s" podCreationTimestamp="2025-10-02 21:24:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.34744249 +0000 UTC m=+98.670450529" watchObservedRunningTime="2025-10-02 21:25:07.361684195 +0000 UTC m=+98.684692244" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.379136 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=77.379108384 podStartE2EDuration="1m17.379108384s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.37895796 +0000 UTC m=+98.701966009" watchObservedRunningTime="2025-10-02 21:25:07.379108384 +0000 UTC m=+98.702116413" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.407050 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=8.40702987 podStartE2EDuration="8.40702987s" podCreationTimestamp="2025-10-02 21:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.405662094 +0000 UTC m=+98.728670113" watchObservedRunningTime="2025-10-02 21:25:07.40702987 +0000 UTC m=+98.730037889" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.446577 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33203122-ce7b-4dd4-bd5b-2a7552cb2648-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.446896 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/33203122-ce7b-4dd4-bd5b-2a7552cb2648-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.447655 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33203122-ce7b-4dd4-bd5b-2a7552cb2648-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.447819 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33203122-ce7b-4dd4-bd5b-2a7552cb2648-service-ca\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.447941 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/33203122-ce7b-4dd4-bd5b-2a7552cb2648-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.447974 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.447951089 podStartE2EDuration="1m18.447951089s" podCreationTimestamp="2025-10-02 21:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.447464186 +0000 UTC m=+98.770472205" watchObservedRunningTime="2025-10-02 21:25:07.447951089 +0000 UTC m=+98.770959138" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.548838 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33203122-ce7b-4dd4-bd5b-2a7552cb2648-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.549333 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/33203122-ce7b-4dd4-bd5b-2a7552cb2648-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.549455 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33203122-ce7b-4dd4-bd5b-2a7552cb2648-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 
21:25:07.549558 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33203122-ce7b-4dd4-bd5b-2a7552cb2648-service-ca\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.549647 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/33203122-ce7b-4dd4-bd5b-2a7552cb2648-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.549805 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/33203122-ce7b-4dd4-bd5b-2a7552cb2648-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.550837 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/33203122-ce7b-4dd4-bd5b-2a7552cb2648-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.551576 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33203122-ce7b-4dd4-bd5b-2a7552cb2648-service-ca\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.563068 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33203122-ce7b-4dd4-bd5b-2a7552cb2648-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.565291 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podStartSLOduration=77.565274052 podStartE2EDuration="1m17.565274052s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.513485707 +0000 UTC m=+98.836493726" watchObservedRunningTime="2025-10-02 21:25:07.565274052 +0000 UTC m=+98.888282071" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.583406 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=46.583389289 podStartE2EDuration="46.583389289s" podCreationTimestamp="2025-10-02 21:24:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-02 21:25:07.583296417 +0000 UTC m=+98.906304446" watchObservedRunningTime="2025-10-02 21:25:07.583389289 +0000 UTC m=+98.906397308" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.583633 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-9qm8w" podStartSLOduration=77.583627896 podStartE2EDuration="1m17.583627896s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.565775255 +0000 UTC m=+98.888783274" watchObservedRunningTime="2025-10-02 21:25:07.583627896 +0000 UTC m=+98.906635915" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.585120 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33203122-ce7b-4dd4-bd5b-2a7552cb2648-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-lxgvv\" (UID: \"33203122-ce7b-4dd4-bd5b-2a7552cb2648\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.603412 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:07 crc kubenswrapper[4636]: E1002 21:25:07.603514 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.603700 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:07 crc kubenswrapper[4636]: E1002 21:25:07.603767 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.603869 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:07 crc kubenswrapper[4636]: E1002 21:25:07.604007 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.629881 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-586cm" podStartSLOduration=78.629859784 podStartE2EDuration="1m18.629859784s" podCreationTimestamp="2025-10-02 21:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.629454124 +0000 UTC m=+98.952462143" watchObservedRunningTime="2025-10-02 21:25:07.629859784 +0000 UTC m=+98.952867803" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.645524 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.669285 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-895mm" podStartSLOduration=77.669265113 podStartE2EDuration="1m17.669265113s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.642024405 +0000 UTC m=+98.965032424" watchObservedRunningTime="2025-10-02 21:25:07.669265113 +0000 UTC m=+98.992273122" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.703031 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-2zt4n" podStartSLOduration=77.703005593 podStartE2EDuration="1m17.703005593s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.687008501 +0000 UTC m=+99.010016520" watchObservedRunningTime="2025-10-02 21:25:07.703005593 +0000 UTC m=+99.026013612" Oct 02 21:25:07 crc kubenswrapper[4636]: I1002 21:25:07.703637 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-kkf52" podStartSLOduration=76.703631899 podStartE2EDuration="1m16.703631899s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:07.702428497 +0000 UTC m=+99.025436516" watchObservedRunningTime="2025-10-02 21:25:07.703631899 +0000 UTC m=+99.026639918" Oct 02 21:25:08 crc kubenswrapper[4636]: I1002 21:25:08.243687 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" event={"ID":"33203122-ce7b-4dd4-bd5b-2a7552cb2648","Type":"ContainerStarted","Data":"df3f5b19abd6e05c69e4dc1567e0f281f10d23ac5aeeca29d849c004907e4c87"} Oct 02 21:25:08 crc kubenswrapper[4636]: I1002 21:25:08.244210 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" event={"ID":"33203122-ce7b-4dd4-bd5b-2a7552cb2648","Type":"ContainerStarted","Data":"ad67040db884795e264a01aec4471502a888e5d6432761f5602414548c399d88"} Oct 02 21:25:08 crc kubenswrapper[4636]: I1002 21:25:08.267631 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lxgvv" podStartSLOduration=78.267612056 
podStartE2EDuration="1m18.267612056s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:08.26700684 +0000 UTC m=+99.590014899" watchObservedRunningTime="2025-10-02 21:25:08.267612056 +0000 UTC m=+99.590620075" Oct 02 21:25:08 crc kubenswrapper[4636]: I1002 21:25:08.603346 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:08 crc kubenswrapper[4636]: E1002 21:25:08.603508 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:08 crc kubenswrapper[4636]: I1002 21:25:08.966369 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:08 crc kubenswrapper[4636]: E1002 21:25:08.966583 4636 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:25:08 crc kubenswrapper[4636]: E1002 21:25:08.966896 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs podName:5e169ed7-2c2e-4623-9f21-330753911ab5 nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.966875229 +0000 UTC m=+164.289883268 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs") pod "network-metrics-daemon-zssg6" (UID: "5e169ed7-2c2e-4623-9f21-330753911ab5") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 21:25:09 crc kubenswrapper[4636]: I1002 21:25:09.603511 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:09 crc kubenswrapper[4636]: I1002 21:25:09.603628 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:09 crc kubenswrapper[4636]: I1002 21:25:09.603808 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:09 crc kubenswrapper[4636]: E1002 21:25:09.607177 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:09 crc kubenswrapper[4636]: E1002 21:25:09.607578 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:09 crc kubenswrapper[4636]: E1002 21:25:09.607691 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:10 crc kubenswrapper[4636]: I1002 21:25:10.603463 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:10 crc kubenswrapper[4636]: E1002 21:25:10.603625 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:11 crc kubenswrapper[4636]: I1002 21:25:11.603530 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:11 crc kubenswrapper[4636]: E1002 21:25:11.603743 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:11 crc kubenswrapper[4636]: I1002 21:25:11.604115 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:11 crc kubenswrapper[4636]: E1002 21:25:11.604211 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:11 crc kubenswrapper[4636]: I1002 21:25:11.604441 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:11 crc kubenswrapper[4636]: E1002 21:25:11.604530 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:12 crc kubenswrapper[4636]: I1002 21:25:12.603474 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:12 crc kubenswrapper[4636]: E1002 21:25:12.604111 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:13 crc kubenswrapper[4636]: I1002 21:25:13.602671 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:13 crc kubenswrapper[4636]: E1002 21:25:13.603003 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:13 crc kubenswrapper[4636]: I1002 21:25:13.602692 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:13 crc kubenswrapper[4636]: E1002 21:25:13.603205 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:13 crc kubenswrapper[4636]: I1002 21:25:13.602679 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:13 crc kubenswrapper[4636]: E1002 21:25:13.603354 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:14 crc kubenswrapper[4636]: I1002 21:25:14.603025 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:14 crc kubenswrapper[4636]: E1002 21:25:14.603298 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:15 crc kubenswrapper[4636]: I1002 21:25:15.604113 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:15 crc kubenswrapper[4636]: I1002 21:25:15.604683 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:15 crc kubenswrapper[4636]: E1002 21:25:15.604675 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:15 crc kubenswrapper[4636]: I1002 21:25:15.604879 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:15 crc kubenswrapper[4636]: E1002 21:25:15.605174 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:15 crc kubenswrapper[4636]: E1002 21:25:15.605507 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:16 crc kubenswrapper[4636]: I1002 21:25:16.603326 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:16 crc kubenswrapper[4636]: E1002 21:25:16.603585 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:17 crc kubenswrapper[4636]: I1002 21:25:17.603905 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:17 crc kubenswrapper[4636]: I1002 21:25:17.603954 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:17 crc kubenswrapper[4636]: I1002 21:25:17.603985 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:17 crc kubenswrapper[4636]: E1002 21:25:17.604099 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:17 crc kubenswrapper[4636]: E1002 21:25:17.604182 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:17 crc kubenswrapper[4636]: E1002 21:25:17.604338 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:18 crc kubenswrapper[4636]: I1002 21:25:18.603535 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:18 crc kubenswrapper[4636]: E1002 21:25:18.603986 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:19 crc kubenswrapper[4636]: I1002 21:25:19.602934 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:19 crc kubenswrapper[4636]: I1002 21:25:19.603017 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:19 crc kubenswrapper[4636]: E1002 21:25:19.603084 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:19 crc kubenswrapper[4636]: E1002 21:25:19.603209 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:19 crc kubenswrapper[4636]: I1002 21:25:19.603408 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:19 crc kubenswrapper[4636]: E1002 21:25:19.605402 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:19 crc kubenswrapper[4636]: I1002 21:25:19.606298 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:25:19 crc kubenswrapper[4636]: E1002 21:25:19.606466 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:25:20 crc kubenswrapper[4636]: I1002 21:25:20.603077 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:20 crc kubenswrapper[4636]: E1002 21:25:20.603206 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:21 crc kubenswrapper[4636]: I1002 21:25:21.603551 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:21 crc kubenswrapper[4636]: I1002 21:25:21.603678 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:21 crc kubenswrapper[4636]: I1002 21:25:21.603688 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:21 crc kubenswrapper[4636]: E1002 21:25:21.603817 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:21 crc kubenswrapper[4636]: E1002 21:25:21.603953 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:21 crc kubenswrapper[4636]: E1002 21:25:21.604030 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:22 crc kubenswrapper[4636]: I1002 21:25:22.603167 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:22 crc kubenswrapper[4636]: E1002 21:25:22.603410 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:23 crc kubenswrapper[4636]: I1002 21:25:23.603397 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:23 crc kubenswrapper[4636]: I1002 21:25:23.603466 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:23 crc kubenswrapper[4636]: I1002 21:25:23.603399 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:23 crc kubenswrapper[4636]: E1002 21:25:23.603615 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:23 crc kubenswrapper[4636]: E1002 21:25:23.603720 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:23 crc kubenswrapper[4636]: E1002 21:25:23.603865 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:24 crc kubenswrapper[4636]: I1002 21:25:24.602835 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:24 crc kubenswrapper[4636]: E1002 21:25:24.603216 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:25 crc kubenswrapper[4636]: I1002 21:25:25.603787 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:25 crc kubenswrapper[4636]: I1002 21:25:25.603861 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:25 crc kubenswrapper[4636]: E1002 21:25:25.603921 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:25 crc kubenswrapper[4636]: E1002 21:25:25.604033 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:25 crc kubenswrapper[4636]: I1002 21:25:25.605277 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:25 crc kubenswrapper[4636]: E1002 21:25:25.605660 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:26 crc kubenswrapper[4636]: I1002 21:25:26.603491 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:26 crc kubenswrapper[4636]: E1002 21:25:26.604042 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:27 crc kubenswrapper[4636]: I1002 21:25:27.603843 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:27 crc kubenswrapper[4636]: I1002 21:25:27.603864 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:27 crc kubenswrapper[4636]: E1002 21:25:27.604879 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:27 crc kubenswrapper[4636]: I1002 21:25:27.604018 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:27 crc kubenswrapper[4636]: E1002 21:25:27.605067 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:27 crc kubenswrapper[4636]: E1002 21:25:27.604619 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:28 crc kubenswrapper[4636]: I1002 21:25:28.603039 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:28 crc kubenswrapper[4636]: E1002 21:25:28.603609 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.328726 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/1.log" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.332935 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/0.log" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.333157 4636 generic.go:334] "Generic (PLEG): container finished" podID="3a64b152-90d7-4dd0-be73-17e987476a1c" containerID="b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d" exitCode=1 Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.333327 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerDied","Data":"b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d"} Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.333532 4636 scope.go:117] "RemoveContainer" containerID="93f51059296ad0d425bbc0bcea1f737eec9e0b34e32af68ae3c409fa064eece1" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.334345 4636 scope.go:117] "RemoveContainer" containerID="b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d" Oct 02 21:25:29 crc kubenswrapper[4636]: E1002 21:25:29.334794 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-895mm_openshift-multus(3a64b152-90d7-4dd0-be73-17e987476a1c)\"" pod="openshift-multus/multus-895mm" podUID="3a64b152-90d7-4dd0-be73-17e987476a1c" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.602884 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.602922 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:29 crc kubenswrapper[4636]: E1002 21:25:29.604253 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:29 crc kubenswrapper[4636]: I1002 21:25:29.604292 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:29 crc kubenswrapper[4636]: E1002 21:25:29.604628 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:29 crc kubenswrapper[4636]: E1002 21:25:29.604680 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:29 crc kubenswrapper[4636]: E1002 21:25:29.630512 4636 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 02 21:25:29 crc kubenswrapper[4636]: E1002 21:25:29.723830 4636 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 21:25:30 crc kubenswrapper[4636]: I1002 21:25:30.340529 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/1.log" Oct 02 21:25:30 crc kubenswrapper[4636]: I1002 21:25:30.602666 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:30 crc kubenswrapper[4636]: E1002 21:25:30.602897 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:31 crc kubenswrapper[4636]: I1002 21:25:31.603460 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:31 crc kubenswrapper[4636]: E1002 21:25:31.603579 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:31 crc kubenswrapper[4636]: I1002 21:25:31.603785 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:31 crc kubenswrapper[4636]: E1002 21:25:31.603833 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:31 crc kubenswrapper[4636]: I1002 21:25:31.603939 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:31 crc kubenswrapper[4636]: E1002 21:25:31.604029 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:32 crc kubenswrapper[4636]: I1002 21:25:32.603857 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:32 crc kubenswrapper[4636]: E1002 21:25:32.604400 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:32 crc kubenswrapper[4636]: I1002 21:25:32.604768 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:25:32 crc kubenswrapper[4636]: E1002 21:25:32.604987 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7qm8_openshift-ovn-kubernetes(db86cff1-cf8b-4043-bbd7-c41ea2b72ad6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" Oct 02 21:25:33 crc kubenswrapper[4636]: I1002 21:25:33.603459 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:33 crc kubenswrapper[4636]: I1002 21:25:33.603459 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:33 crc kubenswrapper[4636]: I1002 21:25:33.603553 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:33 crc kubenswrapper[4636]: E1002 21:25:33.604215 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:33 crc kubenswrapper[4636]: E1002 21:25:33.604407 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:33 crc kubenswrapper[4636]: E1002 21:25:33.604708 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:34 crc kubenswrapper[4636]: I1002 21:25:34.603017 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:34 crc kubenswrapper[4636]: E1002 21:25:34.603289 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:34 crc kubenswrapper[4636]: E1002 21:25:34.725366 4636 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 21:25:35 crc kubenswrapper[4636]: I1002 21:25:35.603297 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:35 crc kubenswrapper[4636]: I1002 21:25:35.603347 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:35 crc kubenswrapper[4636]: E1002 21:25:35.603581 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:35 crc kubenswrapper[4636]: E1002 21:25:35.603739 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:35 crc kubenswrapper[4636]: I1002 21:25:35.604055 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:35 crc kubenswrapper[4636]: E1002 21:25:35.604279 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:36 crc kubenswrapper[4636]: I1002 21:25:36.603362 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:36 crc kubenswrapper[4636]: E1002 21:25:36.603564 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:37 crc kubenswrapper[4636]: I1002 21:25:37.602821 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:37 crc kubenswrapper[4636]: I1002 21:25:37.602910 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:37 crc kubenswrapper[4636]: E1002 21:25:37.603026 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:37 crc kubenswrapper[4636]: I1002 21:25:37.602839 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:37 crc kubenswrapper[4636]: E1002 21:25:37.603262 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:37 crc kubenswrapper[4636]: E1002 21:25:37.603346 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:38 crc kubenswrapper[4636]: I1002 21:25:38.603128 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:38 crc kubenswrapper[4636]: E1002 21:25:38.603413 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:39 crc kubenswrapper[4636]: I1002 21:25:39.603455 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:39 crc kubenswrapper[4636]: I1002 21:25:39.603455 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:39 crc kubenswrapper[4636]: E1002 21:25:39.604446 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:39 crc kubenswrapper[4636]: I1002 21:25:39.604463 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:39 crc kubenswrapper[4636]: E1002 21:25:39.604714 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:39 crc kubenswrapper[4636]: E1002 21:25:39.604663 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:39 crc kubenswrapper[4636]: E1002 21:25:39.727283 4636 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 21:25:40 crc kubenswrapper[4636]: I1002 21:25:40.603377 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:40 crc kubenswrapper[4636]: E1002 21:25:40.603549 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:41 crc kubenswrapper[4636]: I1002 21:25:41.603473 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:41 crc kubenswrapper[4636]: I1002 21:25:41.603527 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:41 crc kubenswrapper[4636]: I1002 21:25:41.603474 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:41 crc kubenswrapper[4636]: E1002 21:25:41.603713 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:41 crc kubenswrapper[4636]: E1002 21:25:41.603904 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:41 crc kubenswrapper[4636]: E1002 21:25:41.604120 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:42 crc kubenswrapper[4636]: I1002 21:25:42.603002 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:42 crc kubenswrapper[4636]: E1002 21:25:42.603188 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:43 crc kubenswrapper[4636]: I1002 21:25:43.603510 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:43 crc kubenswrapper[4636]: I1002 21:25:43.603585 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:43 crc kubenswrapper[4636]: E1002 21:25:43.603791 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:43 crc kubenswrapper[4636]: I1002 21:25:43.603816 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:43 crc kubenswrapper[4636]: E1002 21:25:43.604002 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:43 crc kubenswrapper[4636]: E1002 21:25:43.604381 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:43 crc kubenswrapper[4636]: I1002 21:25:43.604647 4636 scope.go:117] "RemoveContainer" containerID="b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d" Oct 02 21:25:44 crc kubenswrapper[4636]: I1002 21:25:44.397542 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/1.log" Oct 02 21:25:44 crc kubenswrapper[4636]: I1002 21:25:44.398076 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerStarted","Data":"1a2ea439df06f9806eab9afc3c9eafa2569042c5f9505062da564e81d1d281ff"} Oct 02 21:25:44 crc kubenswrapper[4636]: I1002 21:25:44.603007 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:44 crc kubenswrapper[4636]: E1002 21:25:44.603185 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:44 crc kubenswrapper[4636]: E1002 21:25:44.728705 4636 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 21:25:45 crc kubenswrapper[4636]: I1002 21:25:45.602945 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:45 crc kubenswrapper[4636]: I1002 21:25:45.602984 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:45 crc kubenswrapper[4636]: I1002 21:25:45.603075 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:45 crc kubenswrapper[4636]: E1002 21:25:45.603104 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:45 crc kubenswrapper[4636]: E1002 21:25:45.603254 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:45 crc kubenswrapper[4636]: E1002 21:25:45.603369 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:46 crc kubenswrapper[4636]: I1002 21:25:46.602988 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:46 crc kubenswrapper[4636]: E1002 21:25:46.603134 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:47 crc kubenswrapper[4636]: I1002 21:25:47.603411 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:47 crc kubenswrapper[4636]: I1002 21:25:47.603439 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:47 crc kubenswrapper[4636]: E1002 21:25:47.603626 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:47 crc kubenswrapper[4636]: I1002 21:25:47.603675 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:47 crc kubenswrapper[4636]: E1002 21:25:47.603942 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:47 crc kubenswrapper[4636]: E1002 21:25:47.604050 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:47 crc kubenswrapper[4636]: I1002 21:25:47.605139 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:25:48 crc kubenswrapper[4636]: I1002 21:25:48.411136 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/3.log" Oct 02 21:25:48 crc kubenswrapper[4636]: I1002 21:25:48.413403 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerStarted","Data":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} Oct 02 21:25:48 crc kubenswrapper[4636]: I1002 21:25:48.413863 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:25:48 crc kubenswrapper[4636]: I1002 21:25:48.603429 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:48 crc kubenswrapper[4636]: E1002 21:25:48.603669 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:48 crc kubenswrapper[4636]: I1002 21:25:48.716323 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podStartSLOduration=118.716295155 podStartE2EDuration="1m58.716295155s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:25:48.439322432 +0000 UTC m=+139.762330461" watchObservedRunningTime="2025-10-02 21:25:48.716295155 +0000 UTC m=+140.039303214" Oct 02 21:25:48 crc kubenswrapper[4636]: I1002 21:25:48.716820 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zssg6"] Oct 02 21:25:49 crc kubenswrapper[4636]: I1002 21:25:49.419467 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:49 crc kubenswrapper[4636]: E1002 21:25:49.419702 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:49 crc kubenswrapper[4636]: I1002 21:25:49.603003 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:49 crc kubenswrapper[4636]: I1002 21:25:49.603003 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:49 crc kubenswrapper[4636]: I1002 21:25:49.603090 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:49 crc kubenswrapper[4636]: E1002 21:25:49.603834 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:49 crc kubenswrapper[4636]: E1002 21:25:49.604598 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:49 crc kubenswrapper[4636]: E1002 21:25:49.604672 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:49 crc kubenswrapper[4636]: E1002 21:25:49.729300 4636 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 21:25:51 crc kubenswrapper[4636]: I1002 21:25:51.603236 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:51 crc kubenswrapper[4636]: E1002 21:25:51.603424 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:51 crc kubenswrapper[4636]: I1002 21:25:51.603441 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:51 crc kubenswrapper[4636]: I1002 21:25:51.603524 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:51 crc kubenswrapper[4636]: I1002 21:25:51.603433 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:51 crc kubenswrapper[4636]: E1002 21:25:51.603620 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:51 crc kubenswrapper[4636]: E1002 21:25:51.603745 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:51 crc kubenswrapper[4636]: E1002 21:25:51.604032 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:53 crc kubenswrapper[4636]: I1002 21:25:53.117547 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:25:53 crc kubenswrapper[4636]: I1002 21:25:53.117644 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:25:53 crc kubenswrapper[4636]: I1002 21:25:53.603722 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:53 crc kubenswrapper[4636]: I1002 21:25:53.603806 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:53 crc kubenswrapper[4636]: E1002 21:25:53.603890 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 21:25:53 crc kubenswrapper[4636]: E1002 21:25:53.604037 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zssg6" podUID="5e169ed7-2c2e-4623-9f21-330753911ab5" Oct 02 21:25:53 crc kubenswrapper[4636]: I1002 21:25:53.604155 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:53 crc kubenswrapper[4636]: E1002 21:25:53.604221 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 21:25:53 crc kubenswrapper[4636]: I1002 21:25:53.604258 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:53 crc kubenswrapper[4636]: E1002 21:25:53.604329 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.603383 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.603455 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.603395 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.603667 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.606590 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.607219 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.607290 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.607520 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.607714 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 02 21:25:55 crc kubenswrapper[4636]: I1002 21:25:55.607874 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.743865 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:25:57 crc kubenswrapper[4636]: E1002 21:25:57.744116 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:27:59.744083588 +0000 UTC m=+271.067091647 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.845709 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.846152 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.846358 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.846577 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.848061 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.856855 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.857865 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:57 crc kubenswrapper[4636]: I1002 21:25:57.860460 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.032556 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.049914 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.063091 4636 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.065041 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.117820 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.122165 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.129729 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.133624 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.135164 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.135649 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.135903 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-n26j2"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.138525 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.138758 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.138837 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.138897 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.169084 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-n26j2" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170355 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-etcd-client\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170473 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pknjm\" (UniqueName: \"kubernetes.io/projected/c385fb47-f4e5-4934-a44e-6ee2caed0450-kube-api-access-pknjm\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170624 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-encryption-config\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170697 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170784 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-serving-cert\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170864 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.170943 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-audit-policies\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.171009 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c385fb47-f4e5-4934-a44e-6ee2caed0450-audit-dir\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.173418 4636 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.173903 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6fxnn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.174155 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.174229 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.174528 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.174626 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.176154 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.176166 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-497mz"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.180556 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-84tsd"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.180952 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rklgn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.181215 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-wdwwc"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.181475 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lnls5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.182652 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.183276 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.178610 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.183913 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.185654 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.190605 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.190909 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.191264 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.191608 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.191894 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.192186 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.192466 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.192542 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.192645 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.192880 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.193099 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.193315 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.193448 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.193568 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.200139 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76lrr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.200574 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.200918 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.201126 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.201355 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.201580 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.201903 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.202255 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-gbfdn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.202440 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.202857 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.203305 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.203449 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.203558 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.203654 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.203742 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.211358 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204277 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.211629 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.217087 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204364 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.211592 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.217479 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204446 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204487 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204614 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204647 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204684 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.218066 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204855 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.204949 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205003 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205076 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205326 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205362 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205391 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.218729 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.219426 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205422 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.222115 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205452 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205485 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205515 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205546 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205636 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.205671 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.209937 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.210016 4636 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.203658 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.221631 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.222276 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.221798 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.222760 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.221855 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.222889 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.221914 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.223058 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.221944 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.237135 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.221973 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.222014 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.240113 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.240562 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.245559 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.246170 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.246810 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.257060 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 
02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.257215 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.257405 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.281603 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.281744 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.281836 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.281919 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.282003 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.282092 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.282145 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.282229 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.282363 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.282742 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.283199 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.284334 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286003 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-84tsd"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286219 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286289 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286316 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-client-ca\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286336 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14baf1ce-9a21-4232-a0b0-3ad606d31e45-serving-cert\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286356 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-config\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286375 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-audit\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286427 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f15e24-8b0d-4f12-b71f-29565e66b0ca-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286447 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlsr8\" (UniqueName: \"kubernetes.io/projected/31f15e24-8b0d-4f12-b71f-29565e66b0ca-kube-api-access-wlsr8\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286469 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286488 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5hgp\" (UniqueName: \"kubernetes.io/projected/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-kube-api-access-k5hgp\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286504 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b599dd60-130f-4b26-b87d-7df6b14d2d7e-config\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286520 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-config\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286605 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/878328eb-ad2c-4cd7-aff4-19d12588b46d-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286626 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-dir\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286645 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-oauth-config\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286666 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-default-certificate\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286682 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-trusted-ca\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286699 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-machine-approver-tls\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286717 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286735 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286769 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-node-pullsecrets\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286790 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.286792 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-etcd-client\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287025 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-encryption-config\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287052 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-etcd-serving-ca\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287072 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31f15e24-8b0d-4f12-b71f-29565e66b0ca-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287100 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: 
\"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287120 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d53d9154-d213-435c-9b1c-ae798dcfc3e9-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287144 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctr9b\" (UniqueName: \"kubernetes.io/projected/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-kube-api-access-ctr9b\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287272 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14baf1ce-9a21-4232-a0b0-3ad606d31e45-trusted-ca\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287302 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cf2410ff-f182-4416-ba76-49b77dfcce3a-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287323 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-metrics-tls\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287359 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jznsc\" (UniqueName: \"kubernetes.io/projected/4dec7d6d-2309-486c-bebe-19dce69f40d8-kube-api-access-jznsc\") pod \"downloads-7954f5f757-n26j2\" (UID: \"4dec7d6d-2309-486c-bebe-19dce69f40d8\") " pod="openshift-console/downloads-7954f5f757-n26j2" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287386 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m62bl\" (UniqueName: \"kubernetes.io/projected/cf2410ff-f182-4416-ba76-49b77dfcce3a-kube-api-access-m62bl\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287408 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b599dd60-130f-4b26-b87d-7df6b14d2d7e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" 
(UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287425 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gwtr\" (UniqueName: \"kubernetes.io/projected/878328eb-ad2c-4cd7-aff4-19d12588b46d-kube-api-access-2gwtr\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287444 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287473 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvxvs\" (UniqueName: \"kubernetes.io/projected/d6faf1e4-6165-479e-95a2-25e37852f252-kube-api-access-xvxvs\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287505 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g6cw\" (UniqueName: \"kubernetes.io/projected/9a4a1b44-649f-4956-81fc-8be2fb503d7b-kube-api-access-7g6cw\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287594 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287622 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287656 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-audit-dir\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287674 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b2766a44-24c7-48ff-943f-3a225eb74dec-serving-cert\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287702 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5392cef5-bba8-474f-a355-a505f056b6a5-service-ca-bundle\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287722 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c385fb47-f4e5-4934-a44e-6ee2caed0450-audit-dir\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287787 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1c0a8f40-cffd-4cac-b2c5-4d76fa89f611-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jc6sn\" (UID: \"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287812 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287841 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14baf1ce-9a21-4232-a0b0-3ad606d31e45-config\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287859 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-image-import-ca\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287883 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6g99\" (UniqueName: \"kubernetes.io/projected/ffb6abfd-3376-4908-b388-7c398e36f986-kube-api-access-x6g99\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287942 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c385fb47-f4e5-4934-a44e-6ee2caed0450-audit-dir\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: 
\"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.287946 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288010 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53d9154-d213-435c-9b1c-ae798dcfc3e9-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288034 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288056 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twvlw\" (UniqueName: \"kubernetes.io/projected/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-kube-api-access-twvlw\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288077 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d53d9154-d213-435c-9b1c-ae798dcfc3e9-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288109 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-policies\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288136 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288196 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6djb9\" (UniqueName: 
\"kubernetes.io/projected/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-kube-api-access-6djb9\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288225 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-etcd-client\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288228 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288246 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pknjm\" (UniqueName: \"kubernetes.io/projected/c385fb47-f4e5-4934-a44e-6ee2caed0450-kube-api-access-pknjm\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288269 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2410ff-f182-4416-ba76-49b77dfcce3a-serving-cert\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288581 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9lcr\" (UniqueName: \"kubernetes.io/projected/5392cef5-bba8-474f-a355-a505f056b6a5-kube-api-access-z9lcr\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288700 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/878328eb-ad2c-4cd7-aff4-19d12588b46d-images\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288794 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a4a1b44-649f-4956-81fc-8be2fb503d7b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288890 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-encryption-config\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: 
\"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.288969 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-config\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289040 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-config\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289114 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-stats-auth\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289181 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxpkb\" (UniqueName: \"kubernetes.io/projected/1c0a8f40-cffd-4cac-b2c5-4d76fa89f611-kube-api-access-pxpkb\") pod \"cluster-samples-operator-665b6dd947-jc6sn\" (UID: \"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289250 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289323 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4m8z\" (UniqueName: \"kubernetes.io/projected/776d4066-e52b-45f4-8d1c-eaad48feabc9-kube-api-access-w4m8z\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289396 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-oauth-serving-cert\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289463 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gnsc\" (UniqueName: \"kubernetes.io/projected/14baf1ce-9a21-4232-a0b0-3ad606d31e45-kube-api-access-4gnsc\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " 
pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289528 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-config\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289594 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9a4a1b44-649f-4956-81fc-8be2fb503d7b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289945 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289977 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-service-ca\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.289995 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290013 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzd7b\" (UniqueName: \"kubernetes.io/projected/b2766a44-24c7-48ff-943f-3a225eb74dec-kube-api-access-bzd7b\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290052 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-metrics-certs\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290070 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-service-ca-bundle\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290087 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-images\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290105 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b599dd60-130f-4b26-b87d-7df6b14d2d7e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290123 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-config\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290142 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a4a1b44-649f-4956-81fc-8be2fb503d7b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290162 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290183 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-serving-cert\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290200 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-trusted-ca-bundle\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290222 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-serving-cert\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " 
pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290237 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290252 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgnqz\" (UniqueName: \"kubernetes.io/projected/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-kube-api-access-pgnqz\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290266 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffb6abfd-3376-4908-b388-7c398e36f986-serving-cert\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290284 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-auth-proxy-config\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290307 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6faf1e4-6165-479e-95a2-25e37852f252-serving-cert\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290322 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-serving-cert\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290338 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290353 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/878328eb-ad2c-4cd7-aff4-19d12588b46d-proxy-tls\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc 
kubenswrapper[4636]: I1002 21:25:58.290372 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-audit-policies\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290387 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-config\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.290401 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-client-ca\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.297024 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.298709 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.299466 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.299908 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.300060 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.301599 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.301868 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.302037 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.302189 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.305734 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c385fb47-f4e5-4934-a44e-6ee2caed0450-audit-policies\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.306095 4636 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.306280 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.306402 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.306887 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.312304 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.312485 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.312638 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.312811 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.315485 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327000 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-serving-cert\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327070 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327472 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327644 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327761 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327942 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.327947 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.328599 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.334351 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.336089 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.338514 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-etcd-client\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.341121 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mc647"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.341899 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mc647" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.343525 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.350155 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xfzw2"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.350853 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.351769 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.353108 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.360239 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.362165 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.362384 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.372685 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c385fb47-f4e5-4934-a44e-6ee2caed0450-encryption-config\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.374437 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w89sw"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.375402 4636 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.377661 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.378372 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.378679 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.391269 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.395696 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.419477 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6faf1e4-6165-479e-95a2-25e37852f252-serving-cert\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.419886 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-serving-cert\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.419906 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-auth-proxy-config\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.419929 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.419953 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/878328eb-ad2c-4cd7-aff4-19d12588b46d-proxy-tls\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.419980 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6154a418-bf83-481b-9e04-e870a95548db-config\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420001 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thksq\" (UniqueName: \"kubernetes.io/projected/1c6c38fc-98a5-4280-ab21-f967146f3edc-kube-api-access-thksq\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420022 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-config\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420040 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-client-ca\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420059 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420077 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-client-ca\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420095 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-audit\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420112 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f15e24-8b0d-4f12-b71f-29565e66b0ca-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420129 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlsr8\" (UniqueName: \"kubernetes.io/projected/31f15e24-8b0d-4f12-b71f-29565e66b0ca-kube-api-access-wlsr8\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420146 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14baf1ce-9a21-4232-a0b0-3ad606d31e45-serving-cert\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420163 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-config\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420184 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420209 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/97a981ff-12be-4043-afb9-d8c9fa7ef9d5-metrics-tls\") pod \"dns-operator-744455d44c-mc647\" (UID: \"97a981ff-12be-4043-afb9-d8c9fa7ef9d5\") " pod="openshift-dns-operator/dns-operator-744455d44c-mc647" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420233 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5hgp\" (UniqueName: \"kubernetes.io/projected/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-kube-api-access-k5hgp\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420253 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b599dd60-130f-4b26-b87d-7df6b14d2d7e-config\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420275 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-config\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420298 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" 
Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420324 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/878328eb-ad2c-4cd7-aff4-19d12588b46d-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420346 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-default-certificate\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420369 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-trusted-ca\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420387 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-machine-approver-tls\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420404 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-dir\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420422 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-oauth-config\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420439 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420459 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420483 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-2kwhl\" (UniqueName: \"kubernetes.io/projected/700f2044-4ee9-4adb-ab32-f41e84362b15-kube-api-access-2kwhl\") pod \"package-server-manager-789f6589d5-cf5tr\" (UID: \"700f2044-4ee9-4adb-ab32-f41e84362b15\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420506 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-etcd-client\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420573 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-encryption-config\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420604 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-node-pullsecrets\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420624 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-etcd-serving-ca\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420645 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31f15e24-8b0d-4f12-b71f-29565e66b0ca-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420663 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420665 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d53d9154-d213-435c-9b1c-ae798dcfc3e9-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420899 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420921 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14baf1ce-9a21-4232-a0b0-3ad606d31e45-trusted-ca\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420940 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cf2410ff-f182-4416-ba76-49b77dfcce3a-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420957 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-metrics-tls\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420978 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctr9b\" (UniqueName: \"kubernetes.io/projected/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-kube-api-access-ctr9b\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420994 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m62bl\" (UniqueName: \"kubernetes.io/projected/cf2410ff-f182-4416-ba76-49b77dfcce3a-kube-api-access-m62bl\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421020 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl4vc\" (UniqueName: \"kubernetes.io/projected/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-kube-api-access-fl4vc\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421043 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jznsc\" (UniqueName: \"kubernetes.io/projected/4dec7d6d-2309-486c-bebe-19dce69f40d8-kube-api-access-jznsc\") pod \"downloads-7954f5f757-n26j2\" (UID: \"4dec7d6d-2309-486c-bebe-19dce69f40d8\") " pod="openshift-console/downloads-7954f5f757-n26j2" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421060 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gwtr\" (UniqueName: \"kubernetes.io/projected/878328eb-ad2c-4cd7-aff4-19d12588b46d-kube-api-access-2gwtr\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421079 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421101 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b599dd60-130f-4b26-b87d-7df6b14d2d7e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421117 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvxvs\" (UniqueName: \"kubernetes.io/projected/d6faf1e4-6165-479e-95a2-25e37852f252-kube-api-access-xvxvs\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421134 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g6cw\" (UniqueName: \"kubernetes.io/projected/9a4a1b44-649f-4956-81fc-8be2fb503d7b-kube-api-access-7g6cw\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421152 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421175 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-audit-dir\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421192 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2766a44-24c7-48ff-943f-3a225eb74dec-serving-cert\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421210 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ljw6\" (UniqueName: \"kubernetes.io/projected/97a981ff-12be-4043-afb9-d8c9fa7ef9d5-kube-api-access-9ljw6\") pod \"dns-operator-744455d44c-mc647\" (UID: \"97a981ff-12be-4043-afb9-d8c9fa7ef9d5\") " pod="openshift-dns-operator/dns-operator-744455d44c-mc647" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421233 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/700f2044-4ee9-4adb-ab32-f41e84362b15-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cf5tr\" (UID: \"700f2044-4ee9-4adb-ab32-f41e84362b15\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421267 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5392cef5-bba8-474f-a355-a505f056b6a5-service-ca-bundle\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421286 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1c0a8f40-cffd-4cac-b2c5-4d76fa89f611-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jc6sn\" (UID: \"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421311 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421328 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c6c38fc-98a5-4280-ab21-f967146f3edc-proxy-tls\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421356 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421375 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14baf1ce-9a21-4232-a0b0-3ad606d31e45-config\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421395 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-image-import-ca\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421414 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6g99\" (UniqueName: \"kubernetes.io/projected/ffb6abfd-3376-4908-b388-7c398e36f986-kube-api-access-x6g99\") pod \"controller-manager-879f6c89f-rklgn\" 
(UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421430 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421451 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53d9154-d213-435c-9b1c-ae798dcfc3e9-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421467 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d53d9154-d213-435c-9b1c-ae798dcfc3e9-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421484 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-policies\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421502 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421520 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421541 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twvlw\" (UniqueName: \"kubernetes.io/projected/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-kube-api-access-twvlw\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421578 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6djb9\" (UniqueName: \"kubernetes.io/projected/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-kube-api-access-6djb9\") pod 
\"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421601 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2410ff-f182-4416-ba76-49b77dfcce3a-serving-cert\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421618 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9lcr\" (UniqueName: \"kubernetes.io/projected/5392cef5-bba8-474f-a355-a505f056b6a5-kube-api-access-z9lcr\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421635 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/878328eb-ad2c-4cd7-aff4-19d12588b46d-images\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421652 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jf2j\" (UniqueName: \"kubernetes.io/projected/6154a418-bf83-481b-9e04-e870a95548db-kube-api-access-6jf2j\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421678 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-config\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421697 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-config\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421714 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a4a1b44-649f-4956-81fc-8be2fb503d7b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421736 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c6c38fc-98a5-4280-ab21-f967146f3edc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421798 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxpkb\" (UniqueName: \"kubernetes.io/projected/1c0a8f40-cffd-4cac-b2c5-4d76fa89f611-kube-api-access-pxpkb\") pod \"cluster-samples-operator-665b6dd947-jc6sn\" (UID: \"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421818 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421835 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4m8z\" (UniqueName: \"kubernetes.io/projected/776d4066-e52b-45f4-8d1c-eaad48feabc9-kube-api-access-w4m8z\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421854 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-stats-auth\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421870 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gnsc\" (UniqueName: \"kubernetes.io/projected/14baf1ce-9a21-4232-a0b0-3ad606d31e45-kube-api-access-4gnsc\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421887 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-config\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421904 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9a4a1b44-649f-4956-81fc-8be2fb503d7b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421923 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-oauth-serving-cert\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421940 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-service-ca\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421957 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.421973 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6154a418-bf83-481b-9e04-e870a95548db-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422018 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b599dd60-130f-4b26-b87d-7df6b14d2d7e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422041 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-config\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422058 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a4a1b44-649f-4956-81fc-8be2fb503d7b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422075 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzd7b\" (UniqueName: \"kubernetes.io/projected/b2766a44-24c7-48ff-943f-3a225eb74dec-kube-api-access-bzd7b\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422090 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-metrics-certs\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422106 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-service-ca-bundle\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422121 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-images\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422137 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422156 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-trusted-ca-bundle\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422172 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422190 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgnqz\" (UniqueName: \"kubernetes.io/projected/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-kube-api-access-pgnqz\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422206 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffb6abfd-3376-4908-b388-7c398e36f986-serving-cert\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422225 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-serving-cert\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.422960 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.425218 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.426011 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14baf1ce-9a21-4232-a0b0-3ad606d31e45-trusted-ca\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.426362 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cf2410ff-f182-4416-ba76-49b77dfcce3a-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.426986 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.427544 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqzhj"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.427858 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.428252 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.428819 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.428960 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.432922 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kmw7r"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.433074 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.433386 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.434560 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-image-import-ca\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.435367 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5392cef5-bba8-474f-a355-a505f056b6a5-service-ca-bundle\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.435503 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.436257 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.436812 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.436990 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.437953 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-serving-cert\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.438120 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.438806 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-auth-proxy-config\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.439599 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.439919 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-serving-cert\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.441176 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-metrics-tls\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.442194 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.442423 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14baf1ce-9a21-4232-a0b0-3ad606d31e45-config\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.442669 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4hs6"] Oct 02 21:25:58 crc 
kubenswrapper[4636]: I1002 21:25:58.442689 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.443617 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.443999 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.444012 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.444089 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.444259 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b599dd60-130f-4b26-b87d-7df6b14d2d7e-config\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.420251 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.444975 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1c0a8f40-cffd-4cac-b2c5-4d76fa89f611-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jc6sn\" (UID: \"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.445257 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/878328eb-ad2c-4cd7-aff4-19d12588b46d-images\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.445932 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-config\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.446057 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/878328eb-ad2c-4cd7-aff4-19d12588b46d-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc 
kubenswrapper[4636]: I1002 21:25:58.446539 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-service-ca\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.446554 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-client-ca\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.447333 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-config\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.447467 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-config\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.447907 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.447966 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-audit-dir\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.448512 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-client-ca\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.451003 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6faf1e4-6165-479e-95a2-25e37852f252-serving-cert\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.451072 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-config\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.451486 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-trusted-ca\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.451709 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-encryption-config\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.452177 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a4a1b44-649f-4956-81fc-8be2fb503d7b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.453145 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.453678 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-machine-approver-tls\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.454982 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b599dd60-130f-4b26-b87d-7df6b14d2d7e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.455042 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-497mz"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.455049 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-etcd-client\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.455123 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/878328eb-ad2c-4cd7-aff4-19d12588b46d-proxy-tls\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.455322 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-n26j2"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.456400 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-default-certificate\") pod 
\"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.457007 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-t6w5h"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.457429 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.457483 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2410ff-f182-4416-ba76-49b77dfcce3a-serving-cert\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.466544 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.471964 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-policies\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.472958 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2766a44-24c7-48ff-943f-3a225eb74dec-serving-cert\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.473088 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-config\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.474644 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.474697 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-oauth-serving-cert\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.474823 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-node-pullsecrets\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.475431 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-etcd-serving-ca\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.478319 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.478742 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.479237 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-config\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.479814 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.480037 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-stats-auth\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.481705 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5392cef5-bba8-474f-a355-a505f056b6a5-metrics-certs\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.482208 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-service-ca-bundle\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.483003 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-trusted-ca-bundle\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.483039 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-dir\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484057 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rklgn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484083 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-5r2lm"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484296 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14baf1ce-9a21-4232-a0b0-3ad606d31e45-serving-cert\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484577 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484593 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484605 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484625 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484638 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484650 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6fxnn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.484662 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qj2jd"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.485208 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-audit\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.485788 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f15e24-8b0d-4f12-b71f-29565e66b0ca-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.488473 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffb6abfd-3376-4908-b388-7c398e36f986-serving-cert\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.492269 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.492345 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.492819 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6faf1e4-6165-479e-95a2-25e37852f252-config\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.493625 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-zkhsq"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494511 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494528 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494540 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494555 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494567 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494580 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494767 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31f15e24-8b0d-4f12-b71f-29565e66b0ca-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494809 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494860 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.494874 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-images\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.495062 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.495457 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.495870 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.499252 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a4a1b44-649f-4956-81fc-8be2fb503d7b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.499582 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.499621 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.502397 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-oauth-config\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.512109 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.521899 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: 
\"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.521963 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4hs6"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524296 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9v56\" (UniqueName: \"kubernetes.io/projected/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-kube-api-access-b9v56\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524323 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c6c38fc-98a5-4280-ab21-f967146f3edc-proxy-tls\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524371 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524395 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df4eacbc-4ebc-49ec-ac29-7581f96e799b-serving-cert\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524434 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jf2j\" (UniqueName: \"kubernetes.io/projected/6154a418-bf83-481b-9e04-e870a95548db-kube-api-access-6jf2j\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524454 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-cabundle\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524472 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r655x\" (UniqueName: \"kubernetes.io/projected/4bdf9ed1-c4f3-4364-866b-ee6098476ab1-kube-api-access-r655x\") pod \"migrator-59844c95c7-298vs\" (UID: \"4bdf9ed1-c4f3-4364-866b-ee6098476ab1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524488 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxkjp\" (UniqueName: \"kubernetes.io/projected/67eaa966-b8bc-427c-a47f-1314b84d74cc-kube-api-access-gxkjp\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524527 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwjsj\" (UniqueName: \"kubernetes.io/projected/d566f76d-7861-4304-b121-22fddb254188-kube-api-access-dwjsj\") pod \"multus-admission-controller-857f4d67dd-w89sw\" (UID: \"d566f76d-7861-4304-b121-22fddb254188\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524550 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6154a418-bf83-481b-9e04-e870a95548db-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524571 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524587 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3c7843cb-a7a2-4db4-b244-f88476448291-secret-volume\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524612 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp6jh\" (UniqueName: \"kubernetes.io/projected/607b13e2-3423-46e7-938c-cb7ad263e017-kube-api-access-bp6jh\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524644 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524676 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/607b13e2-3423-46e7-938c-cb7ad263e017-profile-collector-cert\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 
21:25:58.524694 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hxzm\" (UniqueName: \"kubernetes.io/projected/610b97c0-7fa1-4c6b-bfb8-2247491aae21-kube-api-access-6hxzm\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524709 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c7843cb-a7a2-4db4-b244-f88476448291-config-volume\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524723 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-srv-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524760 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524777 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kwhl\" (UniqueName: \"kubernetes.io/projected/700f2044-4ee9-4adb-ab32-f41e84362b15-kube-api-access-2kwhl\") pod \"package-server-manager-789f6589d5-cf5tr\" (UID: \"700f2044-4ee9-4adb-ab32-f41e84362b15\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524796 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67eaa966-b8bc-427c-a47f-1314b84d74cc-metrics-tls\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524811 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/607b13e2-3423-46e7-938c-cb7ad263e017-srv-cert\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.524827 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/16abd9f8-9089-4ecd-8314-06a470d37b8a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7t4m5\" (UID: \"16abd9f8-9089-4ecd-8314-06a470d37b8a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 
21:25:58.536508 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl4vc\" (UniqueName: \"kubernetes.io/projected/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-kube-api-access-fl4vc\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536615 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ljw6\" (UniqueName: \"kubernetes.io/projected/97a981ff-12be-4043-afb9-d8c9fa7ef9d5-kube-api-access-9ljw6\") pod \"dns-operator-744455d44c-mc647\" (UID: \"97a981ff-12be-4043-afb9-d8c9fa7ef9d5\") " pod="openshift-dns-operator/dns-operator-744455d44c-mc647" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536641 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22kld\" (UniqueName: \"kubernetes.io/projected/df4eacbc-4ebc-49ec-ac29-7581f96e799b-kube-api-access-22kld\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536659 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vc55\" (UniqueName: \"kubernetes.io/projected/3c7843cb-a7a2-4db4-b244-f88476448291-kube-api-access-5vc55\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536687 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/700f2044-4ee9-4adb-ab32-f41e84362b15-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cf5tr\" (UID: \"700f2044-4ee9-4adb-ab32-f41e84362b15\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536714 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67eaa966-b8bc-427c-a47f-1314b84d74cc-config-volume\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536817 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-profile-collector-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536909 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c6c38fc-98a5-4280-ab21-f967146f3edc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 
21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536935 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df4eacbc-4ebc-49ec-ac29-7581f96e799b-config\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.536979 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d566f76d-7861-4304-b121-22fddb254188-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w89sw\" (UID: \"d566f76d-7861-4304-b121-22fddb254188\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.537001 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-key\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.537044 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mll24\" (UniqueName: \"kubernetes.io/projected/16abd9f8-9089-4ecd-8314-06a470d37b8a-kube-api-access-mll24\") pod \"control-plane-machine-set-operator-78cbb6b69f-7t4m5\" (UID: \"16abd9f8-9089-4ecd-8314-06a470d37b8a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.537107 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6154a418-bf83-481b-9e04-e870a95548db-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.537135 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thksq\" (UniqueName: \"kubernetes.io/projected/1c6c38fc-98a5-4280-ab21-f967146f3edc-kube-api-access-thksq\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.552938 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c6c38fc-98a5-4280-ab21-f967146f3edc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.556630 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/97a981ff-12be-4043-afb9-d8c9fa7ef9d5-metrics-tls\") pod \"dns-operator-744455d44c-mc647\" (UID: \"97a981ff-12be-4043-afb9-d8c9fa7ef9d5\") " pod="openshift-dns-operator/dns-operator-744455d44c-mc647" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 
21:25:58.556708 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvzbl\" (UniqueName: \"kubernetes.io/projected/a8582aa5-89aa-4a01-b168-22605edaf7cb-kube-api-access-xvzbl\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.562506 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.567163 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.571359 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.572135 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.572832 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lnls5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.586184 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.586626 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.590863 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53d9154-d213-435c-9b1c-ae798dcfc3e9-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.591463 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.593424 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wdwwc"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.594631 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.595813 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xfzw2"] Oct 02 21:25:58 crc kubenswrapper[4636]: W1002 21:25:58.596370 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-536f65319802d4ed25ed68d45f73004f20e0c765fd5b3be65237adb44ac777b4 WatchSource:0}: Error finding container 536f65319802d4ed25ed68d45f73004f20e0c765fd5b3be65237adb44ac777b4: Status 404 returned error can't find the container with id 536f65319802d4ed25ed68d45f73004f20e0c765fd5b3be65237adb44ac777b4 Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.597049 4636 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w89sw"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.597107 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.598127 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76lrr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.599292 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.600422 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5r2lm"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.601820 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.602937 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mc647"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.604021 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-t6w5h"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.604763 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.605918 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.606624 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qj2jd"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.609902 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqzhj"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.610736 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.612335 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kmw7r"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.613438 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"] Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.614727 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d53d9154-d213-435c-9b1c-ae798dcfc3e9-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.626035 4636 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.646883 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.650571 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658623 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvzbl\" (UniqueName: \"kubernetes.io/projected/a8582aa5-89aa-4a01-b168-22605edaf7cb-kube-api-access-xvzbl\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658665 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9v56\" (UniqueName: \"kubernetes.io/projected/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-kube-api-access-b9v56\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658704 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df4eacbc-4ebc-49ec-ac29-7581f96e799b-serving-cert\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658775 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-cabundle\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658797 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r655x\" (UniqueName: \"kubernetes.io/projected/4bdf9ed1-c4f3-4364-866b-ee6098476ab1-kube-api-access-r655x\") pod \"migrator-59844c95c7-298vs\" (UID: \"4bdf9ed1-c4f3-4364-866b-ee6098476ab1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658814 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxkjp\" (UniqueName: \"kubernetes.io/projected/67eaa966-b8bc-427c-a47f-1314b84d74cc-kube-api-access-gxkjp\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658852 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwjsj\" (UniqueName: \"kubernetes.io/projected/d566f76d-7861-4304-b121-22fddb254188-kube-api-access-dwjsj\") pod \"multus-admission-controller-857f4d67dd-w89sw\" (UID: 
\"d566f76d-7861-4304-b121-22fddb254188\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658882 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658899 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3c7843cb-a7a2-4db4-b244-f88476448291-secret-volume\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658918 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp6jh\" (UniqueName: \"kubernetes.io/projected/607b13e2-3423-46e7-938c-cb7ad263e017-kube-api-access-bp6jh\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658939 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658959 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/607b13e2-3423-46e7-938c-cb7ad263e017-profile-collector-cert\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.658976 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hxzm\" (UniqueName: \"kubernetes.io/projected/610b97c0-7fa1-4c6b-bfb8-2247491aae21-kube-api-access-6hxzm\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659003 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c7843cb-a7a2-4db4-b244-f88476448291-config-volume\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659019 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-srv-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 
crc kubenswrapper[4636]: I1002 21:25:58.659049 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67eaa966-b8bc-427c-a47f-1314b84d74cc-metrics-tls\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659068 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/607b13e2-3423-46e7-938c-cb7ad263e017-srv-cert\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659085 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/16abd9f8-9089-4ecd-8314-06a470d37b8a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7t4m5\" (UID: \"16abd9f8-9089-4ecd-8314-06a470d37b8a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659134 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vc55\" (UniqueName: \"kubernetes.io/projected/3c7843cb-a7a2-4db4-b244-f88476448291-kube-api-access-5vc55\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659158 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22kld\" (UniqueName: \"kubernetes.io/projected/df4eacbc-4ebc-49ec-ac29-7581f96e799b-kube-api-access-22kld\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659187 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67eaa966-b8bc-427c-a47f-1314b84d74cc-config-volume\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659210 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-profile-collector-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659256 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df4eacbc-4ebc-49ec-ac29-7581f96e799b-config\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659273 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/d566f76d-7861-4304-b121-22fddb254188-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w89sw\" (UID: \"d566f76d-7861-4304-b121-22fddb254188\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659292 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-key\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.659324 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mll24\" (UniqueName: \"kubernetes.io/projected/16abd9f8-9089-4ecd-8314-06a470d37b8a-kube-api-access-mll24\") pod \"control-plane-machine-set-operator-78cbb6b69f-7t4m5\" (UID: \"16abd9f8-9089-4ecd-8314-06a470d37b8a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.665441 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.685233 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.691741 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.705785 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.747593 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pknjm\" (UniqueName: \"kubernetes.io/projected/c385fb47-f4e5-4934-a44e-6ee2caed0450-kube-api-access-pknjm\") pod \"apiserver-7bbb656c7d-n8ql5\" (UID: \"c385fb47-f4e5-4934-a44e-6ee2caed0450\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.766256 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.785871 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.803001 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.817088 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6154a418-bf83-481b-9e04-e870a95548db-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.817965 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.828832 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.845483 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.855090 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6154a418-bf83-481b-9e04-e870a95548db-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.866524 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.872828 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c6c38fc-98a5-4280-ab21-f967146f3edc-proxy-tls\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.890145 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.911184 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.926375 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.945422 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.962462 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/700f2044-4ee9-4adb-ab32-f41e84362b15-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cf5tr\" (UID: \"700f2044-4ee9-4adb-ab32-f41e84362b15\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:25:58 crc 
kubenswrapper[4636]: I1002 21:25:58.965793 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Oct 02 21:25:58 crc kubenswrapper[4636]: I1002 21:25:58.985353 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.005876 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.014926 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/97a981ff-12be-4043-afb9-d8c9fa7ef9d5-metrics-tls\") pod \"dns-operator-744455d44c-mc647\" (UID: \"97a981ff-12be-4043-afb9-d8c9fa7ef9d5\") " pod="openshift-dns-operator/dns-operator-744455d44c-mc647"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.025886 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.045629 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.053060 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5"]
Oct 02 21:25:59 crc kubenswrapper[4636]: W1002 21:25:59.058336 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc385fb47_f4e5_4934_a44e_6ee2caed0450.slice/crio-9849d93f8c1ca2d965bc0fcbbcb251f08acbf192cdd09c9fefd1753302088971 WatchSource:0}: Error finding container 9849d93f8c1ca2d965bc0fcbbcb251f08acbf192cdd09c9fefd1753302088971: Status 404 returned error can't find the container with id 9849d93f8c1ca2d965bc0fcbbcb251f08acbf192cdd09c9fefd1753302088971
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.078864 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.084449 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.104988 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.126692 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.145058 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.165673 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.186163 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.206665 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.225660 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.235458 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/607b13e2-3423-46e7-938c-cb7ad263e017-profile-collector-cert\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.235742 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3c7843cb-a7a2-4db4-b244-f88476448291-secret-volume\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.236847 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-profile-collector-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.245456 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.266107 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.273201 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d566f76d-7861-4304-b121-22fddb254188-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w89sw\" (UID: \"d566f76d-7861-4304-b121-22fddb254188\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.285619 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.293062 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/607b13e2-3423-46e7-938c-cb7ad263e017-srv-cert\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.306111 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.326002 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.344936 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.366407 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.387370 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.391177 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c7843cb-a7a2-4db4-b244-f88476448291-config-volume\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.406118 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.414663 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/16abd9f8-9089-4ecd-8314-06a470d37b8a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7t4m5\" (UID: \"16abd9f8-9089-4ecd-8314-06a470d37b8a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.425734 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.444124 4636 request.go:700] Waited for 1.014929453s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dcollect-profiles-dockercfg-kzf4t&limit=500&resourceVersion=0
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.445738 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.461341 4636 generic.go:334] "Generic (PLEG): container finished" podID="c385fb47-f4e5-4934-a44e-6ee2caed0450" containerID="8b1388ffdebe539af0ebb5bdbdeb6cfcb6376592317e509393e367a04ebf89ec" exitCode=0
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.461409 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" event={"ID":"c385fb47-f4e5-4934-a44e-6ee2caed0450","Type":"ContainerDied","Data":"8b1388ffdebe539af0ebb5bdbdeb6cfcb6376592317e509393e367a04ebf89ec"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.461436 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" event={"ID":"c385fb47-f4e5-4934-a44e-6ee2caed0450","Type":"ContainerStarted","Data":"9849d93f8c1ca2d965bc0fcbbcb251f08acbf192cdd09c9fefd1753302088971"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.463158 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"58e7948a94c47dfab07c4fed5dc8ba7b713937036d170be50120f92e10c8fa35"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.463181 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3ba55105282b38d4dae8fe1ccd3e17dd31e3e5a166ae53bfcf30d322642931a1"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.463465 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.465641 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.466443 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"38e5d611edb02ce28eead070d691e88fd27f7a05d9d0693536091e351cad17d8"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.466468 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"5f3a75ddeac2c530a0d44e4fd8f1c9d6300688dfee091dacba0a2bb3d3b0825a"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.468799 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"abf14c1131aa1e72f2f5e69a5ba86d215c686e3a434a1d6add60b3bc3fee134c"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.468825 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"536f65319802d4ed25ed68d45f73004f20e0c765fd5b3be65237adb44ac777b4"}
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.485170 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.538977 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctr9b\" (UniqueName: \"kubernetes.io/projected/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-kube-api-access-ctr9b\") pod \"console-f9d7485db-wdwwc\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") " pod="openshift-console/console-f9d7485db-wdwwc"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.543070 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m62bl\" (UniqueName: \"kubernetes.io/projected/cf2410ff-f182-4416-ba76-49b77dfcce3a-kube-api-access-m62bl\") pod \"openshift-config-operator-7777fb866f-cm9hg\" (UID: \"cf2410ff-f182-4416-ba76-49b77dfcce3a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.571641 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jznsc\" (UniqueName: \"kubernetes.io/projected/4dec7d6d-2309-486c-bebe-19dce69f40d8-kube-api-access-jznsc\") pod \"downloads-7954f5f757-n26j2\" (UID: \"4dec7d6d-2309-486c-bebe-19dce69f40d8\") " pod="openshift-console/downloads-7954f5f757-n26j2"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.595437 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gwtr\" (UniqueName: \"kubernetes.io/projected/878328eb-ad2c-4cd7-aff4-19d12588b46d-kube-api-access-2gwtr\") pod \"machine-config-operator-74547568cd-tkzhx\" (UID: \"878328eb-ad2c-4cd7-aff4-19d12588b46d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.605210 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.612075 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5hgp\" (UniqueName: \"kubernetes.io/projected/458c1259-882d-4c4c-a4b6-e2c93bb9fe70-kube-api-access-k5hgp\") pod \"machine-approver-56656f9798-fmtgx\" (UID: \"458c1259-882d-4c4c-a4b6-e2c93bb9fe70\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.634455 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.644387 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6g99\" (UniqueName: \"kubernetes.io/projected/ffb6abfd-3376-4908-b388-7c398e36f986-kube-api-access-x6g99\") pod \"controller-manager-879f6c89f-rklgn\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.646411 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: W1002 21:25:59.653159 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod458c1259_882d_4c4c_a4b6_e2c93bb9fe70.slice/crio-7811c76a7e9802076b0b34e976216346e5437d954cfa66de7f26b4f70a857a52 WatchSource:0}: Error finding container 7811c76a7e9802076b0b34e976216346e5437d954cfa66de7f26b4f70a857a52: Status 404 returned error can't find the container with id 7811c76a7e9802076b0b34e976216346e5437d954cfa66de7f26b4f70a857a52
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.654217 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx"
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659102 4636 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659225 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df4eacbc-4ebc-49ec-ac29-7581f96e799b-serving-cert podName:df4eacbc-4ebc-49ec-ac29-7581f96e799b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.159202518 +0000 UTC m=+151.482210537 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/df4eacbc-4ebc-49ec-ac29-7581f96e799b-serving-cert") pod "service-ca-operator-777779d784-wksjf" (UID: "df4eacbc-4ebc-49ec-ac29-7581f96e799b") : failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659283 4636 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659314 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca podName:610b97c0-7fa1-4c6b-bfb8-2247491aae21 nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.15930514 +0000 UTC m=+151.482313159 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca") pod "marketplace-operator-79b997595-g4hs6" (UID: "610b97c0-7fa1-4c6b-bfb8-2247491aae21") : failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659341 4636 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659369 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-cabundle podName:a8582aa5-89aa-4a01-b168-22605edaf7cb nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.159357762 +0000 UTC m=+151.482366021 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-cabundle") pod "service-ca-9c57cc56f-kmw7r" (UID: "a8582aa5-89aa-4a01-b168-22605edaf7cb") : failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659399 4636 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659422 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/df4eacbc-4ebc-49ec-ac29-7581f96e799b-config podName:df4eacbc-4ebc-49ec-ac29-7581f96e799b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.159415324 +0000 UTC m=+151.482423343 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/df4eacbc-4ebc-49ec-ac29-7581f96e799b-config") pod "service-ca-operator-777779d784-wksjf" (UID: "df4eacbc-4ebc-49ec-ac29-7581f96e799b") : failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659824 4636 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659857 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/67eaa966-b8bc-427c-a47f-1314b84d74cc-config-volume podName:67eaa966-b8bc-427c-a47f-1314b84d74cc nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.159848146 +0000 UTC m=+151.482856385 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/67eaa966-b8bc-427c-a47f-1314b84d74cc-config-volume") pod "dns-default-t6w5h" (UID: "67eaa966-b8bc-427c-a47f-1314b84d74cc") : failed to sync configmap cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659891 4636 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659918 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-key podName:a8582aa5-89aa-4a01-b168-22605edaf7cb nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.159910597 +0000 UTC m=+151.482918866 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-key") pod "service-ca-9c57cc56f-kmw7r" (UID: "a8582aa5-89aa-4a01-b168-22605edaf7cb") : failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659940 4636 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659968 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics podName:610b97c0-7fa1-4c6b-bfb8-2247491aae21 nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.159960199 +0000 UTC m=+151.482968218 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics") pod "marketplace-operator-79b997595-g4hs6" (UID: "610b97c0-7fa1-4c6b-bfb8-2247491aae21") : failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.659985 4636 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.660009 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/67eaa966-b8bc-427c-a47f-1314b84d74cc-metrics-tls podName:67eaa966-b8bc-427c-a47f-1314b84d74cc nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.1600013 +0000 UTC m=+151.483009569 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/67eaa966-b8bc-427c-a47f-1314b84d74cc-metrics-tls") pod "dns-default-t6w5h" (UID: "67eaa966-b8bc-427c-a47f-1314b84d74cc") : failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.660030 4636 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: E1002 21:25:59.660058 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-srv-cert podName:f51320f2-d5d7-4d3a-a23a-efbe6290fe51 nodeName:}" failed. No retries permitted until 2025-10-02 21:26:00.160048181 +0000 UTC m=+151.483056200 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-srv-cert") pod "olm-operator-6b444d44fb-v8nrr" (UID: "f51320f2-d5d7-4d3a-a23a-efbe6290fe51") : failed to sync secret cache: timed out waiting for the condition
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.664659 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.666968 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.686568 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.694774 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wdwwc"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.728348 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d53d9154-d213-435c-9b1c-ae798dcfc3e9-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bdjjn\" (UID: \"d53d9154-d213-435c-9b1c-ae798dcfc3e9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.743604 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twvlw\" (UniqueName: \"kubernetes.io/projected/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-kube-api-access-twvlw\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.747394 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-n26j2"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.774816 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6djb9\" (UniqueName: \"kubernetes.io/projected/6252e8b9-5b10-4dbd-9f02-50e0a5e47233-kube-api-access-6djb9\") pod \"machine-api-operator-5694c8668f-84tsd\" (UID: \"6252e8b9-5b10-4dbd-9f02-50e0a5e47233\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.787716 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.789264 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.790010 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dhfb5\" (UID: \"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.805792 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.826319 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.834902 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.847279 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.863186 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx"]
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.888694 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9lcr\" (UniqueName: \"kubernetes.io/projected/5392cef5-bba8-474f-a355-a505f056b6a5-kube-api-access-z9lcr\") pod \"router-default-5444994796-gbfdn\" (UID: \"5392cef5-bba8-474f-a355-a505f056b6a5\") " pod="openshift-ingress/router-default-5444994796-gbfdn"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.909274 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.917888 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.926343 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.945154 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.945266 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.980169 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn"
Oct 02 21:25:59 crc kubenswrapper[4636]: I1002 21:25:59.982394 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvxvs\" (UniqueName: \"kubernetes.io/projected/d6faf1e4-6165-479e-95a2-25e37852f252-kube-api-access-xvxvs\") pod \"authentication-operator-69f744f599-6fxnn\" (UID: \"d6faf1e4-6165-479e-95a2-25e37852f252\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.001049 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g6cw\" (UniqueName: \"kubernetes.io/projected/9a4a1b44-649f-4956-81fc-8be2fb503d7b-kube-api-access-7g6cw\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.005289 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.024395 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.040232 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rklgn"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.045150 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.070049 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wdwwc"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.071792 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.084702 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: W1002 21:26:00.104006 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4ddd281_18ba_41ec_b5a6_788b9f5a942e.slice/crio-bd7aa2fc8fabef2d8a1d5abbe48e5c6fefd07e8d48cb4374034448e7a2aae36c WatchSource:0}: Error finding container bd7aa2fc8fabef2d8a1d5abbe48e5c6fefd07e8d48cb4374034448e7a2aae36c: Status 404 returned error can't find the container with id bd7aa2fc8fabef2d8a1d5abbe48e5c6fefd07e8d48cb4374034448e7a2aae36c
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.111427 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.127252 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.138629 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-gbfdn"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.150193 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.166028 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.172531 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.173489 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190602 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-srv-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190659 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67eaa966-b8bc-427c-a47f-1314b84d74cc-metrics-tls\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190736 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67eaa966-b8bc-427c-a47f-1314b84d74cc-config-volume\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190794 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df4eacbc-4ebc-49ec-ac29-7581f96e799b-config\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190822 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-key\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190902 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df4eacbc-4ebc-49ec-ac29-7581f96e799b-serving-cert\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.190932 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-cabundle\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.191004 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.191046 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.192626 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-cabundle\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.194401 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.195516 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df4eacbc-4ebc-49ec-ac29-7581f96e799b-config\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.199800 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8582aa5-89aa-4a01-b168-22605edaf7cb-signing-key\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.200835 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-srv-cert\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.203842 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxpkb\" (UniqueName: \"kubernetes.io/projected/1c0a8f40-cffd-4cac-b2c5-4d76fa89f611-kube-api-access-pxpkb\") pod \"cluster-samples-operator-665b6dd947-jc6sn\" (UID: \"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.204027 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.208415 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df4eacbc-4ebc-49ec-ac29-7581f96e799b-serving-cert\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.230810 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b599dd60-130f-4b26-b87d-7df6b14d2d7e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zzldq\" (UID: \"b599dd60-130f-4b26-b87d-7df6b14d2d7e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.244399 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4m8z\" (UniqueName: \"kubernetes.io/projected/776d4066-e52b-45f4-8d1c-eaad48feabc9-kube-api-access-w4m8z\") pod \"oauth-openshift-558db77b4-76lrr\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " pod="openshift-authentication/oauth-openshift-558db77b4-76lrr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.256194 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.258026 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.261477 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gnsc\" (UniqueName: \"kubernetes.io/projected/14baf1ce-9a21-4232-a0b0-3ad606d31e45-kube-api-access-4gnsc\") pod \"console-operator-58897d9998-497mz\" (UID: \"14baf1ce-9a21-4232-a0b0-3ad606d31e45\") " pod="openshift-console-operator/console-operator-58897d9998-497mz"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.272485 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.282461 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9a4a1b44-649f-4956-81fc-8be2fb503d7b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lsk9m\" (UID: \"9a4a1b44-649f-4956-81fc-8be2fb503d7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.298994 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-84tsd"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.304633 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzd7b\" (UniqueName: \"kubernetes.io/projected/b2766a44-24c7-48ff-943f-3a225eb74dec-kube-api-access-bzd7b\") pod \"route-controller-manager-6576b87f9c-9759p\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.316922 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"
Oct 02 21:26:00 crc kubenswrapper[4636]: W1002 21:26:00.326663 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6252e8b9_5b10_4dbd_9f02_50e0a5e47233.slice/crio-061ac962e95d4b5076b5bda235f82705e8eac68f152983dbfbfcbdb446aac5b5 WatchSource:0}: Error finding container 061ac962e95d4b5076b5bda235f82705e8eac68f152983dbfbfcbdb446aac5b5: Status 404 returned error can't find the container with id 061ac962e95d4b5076b5bda235f82705e8eac68f152983dbfbfcbdb446aac5b5
Oct 02 21:26:00 crc kubenswrapper[4636]: W1002 21:26:00.328551 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5392cef5_bba8_474f_a355_a505f056b6a5.slice/crio-49f3e3b30b4e9915ba6f82f00095f6e200237585b761000fb1c6825c96d145b5 WatchSource:0}: Error finding container 49f3e3b30b4e9915ba6f82f00095f6e200237585b761000fb1c6825c96d145b5: Status 404 returned error can't find the container with id 49f3e3b30b4e9915ba6f82f00095f6e200237585b761000fb1c6825c96d145b5
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.328778 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlsr8\" (UniqueName: \"kubernetes.io/projected/31f15e24-8b0d-4f12-b71f-29565e66b0ca-kube-api-access-wlsr8\") pod \"openshift-apiserver-operator-796bbdcf4f-mnt7l\" (UID: \"31f15e24-8b0d-4f12-b71f-29565e66b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.344258 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.348037 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgnqz\" (UniqueName: \"kubernetes.io/projected/e3d86ad9-1a43-4146-aa2b-13a70e8acccb-kube-api-access-pgnqz\") pod \"apiserver-76f77b778f-lnls5\" (UID: \"e3d86ad9-1a43-4146-aa2b-13a70e8acccb\") " pod="openshift-apiserver/apiserver-76f77b778f-lnls5"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.365026 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.365458 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-n26j2"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.373472 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67eaa966-b8bc-427c-a47f-1314b84d74cc-config-volume\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.379768 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lnls5"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.384549 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.400417 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67eaa966-b8bc-427c-a47f-1314b84d74cc-metrics-tls\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.412669 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.412799 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.422690 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.425740 4636 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.435248 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.444811 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.463785 4636 request.go:700] Waited for 1.96772289s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Ddefault-dockercfg-2llfx&limit=500&resourceVersion=0
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.465882 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.488232 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-497mz"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.489056 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" event={"ID":"ffb6abfd-3376-4908-b388-7c398e36f986","Type":"ContainerStarted","Data":"26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.489090 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" event={"ID":"ffb6abfd-3376-4908-b388-7c398e36f986","Type":"ContainerStarted","Data":"d215aef1ee352044d2bec54b724de00649e5f5d99e021416e8e4b23e406adf6f"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.491690 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" event={"ID":"878328eb-ad2c-4cd7-aff4-19d12588b46d","Type":"ContainerStarted","Data":"67b7570941cf1675649d242793c6eee62cc36dd34a83d5ce1965dfb0b28c298d"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.491791 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" event={"ID":"878328eb-ad2c-4cd7-aff4-19d12588b46d","Type":"ContainerStarted","Data":"75aebc5169cd4d6b422fcabdde4796fc1c1062b08991a5be16cb4de9ad9622fb"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.492641 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.498711 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n26j2" event={"ID":"4dec7d6d-2309-486c-bebe-19dce69f40d8","Type":"ContainerStarted","Data":"cc0ced827ea4010dc04a7e3de86ba6c246025effebd08b76425e49d35bfa9b60"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.499732 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-gbfdn" event={"ID":"5392cef5-bba8-474f-a355-a505f056b6a5","Type":"ContainerStarted","Data":"49f3e3b30b4e9915ba6f82f00095f6e200237585b761000fb1c6825c96d145b5"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.502770 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" event={"ID":"cf2410ff-f182-4416-ba76-49b77dfcce3a","Type":"ContainerStarted","Data":"944c48b663f7552cc22efb4e9289988588b1d21558e94984d38e8663a4e0a452"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.507908 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.513734 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" event={"ID":"6252e8b9-5b10-4dbd-9f02-50e0a5e47233","Type":"ContainerStarted","Data":"061ac962e95d4b5076b5bda235f82705e8eac68f152983dbfbfcbdb446aac5b5"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.515538 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wdwwc" event={"ID":"b4ddd281-18ba-41ec-b5a6-788b9f5a942e","Type":"ContainerStarted","Data":"bd7aa2fc8fabef2d8a1d5abbe48e5c6fefd07e8d48cb4374034448e7a2aae36c"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.517430 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" event={"ID":"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023","Type":"ContainerStarted","Data":"22031ce0ff66f80608eeb7cc2acb6b190673722eeb25bf9ef1ed222abc261cc8"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.520619 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" event={"ID":"458c1259-882d-4c4c-a4b6-e2c93bb9fe70","Type":"ContainerStarted","Data":"120c00a15d3030b00deb40df632a1d3760abba0ccdafff180b9597afa3704cbc"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.520647 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" event={"ID":"458c1259-882d-4c4c-a4b6-e2c93bb9fe70","Type":"ContainerStarted","Data":"7811c76a7e9802076b0b34e976216346e5437d954cfa66de7f26b4f70a857a52"}
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.525677 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.559121 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.560218 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.568371 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.586102 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.655426 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jf2j\" (UniqueName: \"kubernetes.io/projected/6154a418-bf83-481b-9e04-e870a95548db-kube-api-access-6jf2j\") pod \"openshift-controller-manager-operator-756b6f6bc6-zr76q\" (UID: \"6154a418-bf83-481b-9e04-e870a95548db\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.675557 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6fxnn"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.685414 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kwhl\" (UniqueName: \"kubernetes.io/projected/700f2044-4ee9-4adb-ab32-f41e84362b15-kube-api-access-2kwhl\") pod \"package-server-manager-789f6589d5-cf5tr\" (UID: \"700f2044-4ee9-4adb-ab32-f41e84362b15\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.718732 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ljw6\" (UniqueName: \"kubernetes.io/projected/97a981ff-12be-4043-afb9-d8c9fa7ef9d5-kube-api-access-9ljw6\") pod \"dns-operator-744455d44c-mc647\" (UID: \"97a981ff-12be-4043-afb9-d8c9fa7ef9d5\") " pod="openshift-dns-operator/dns-operator-744455d44c-mc647"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.719874 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl4vc\" (UniqueName: \"kubernetes.io/projected/6fe85c5e-41ae-4c6a-ad16-21f46ccee66e-kube-api-access-fl4vc\") pod \"kube-storage-version-migrator-operator-b67b599dd-bsk9d\" (UID: \"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.765918 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.784295 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76lrr"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.797868 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxkjp\" (UniqueName: \"kubernetes.io/projected/67eaa966-b8bc-427c-a47f-1314b84d74cc-kube-api-access-gxkjp\") pod \"dns-default-t6w5h\" (UID: \"67eaa966-b8bc-427c-a47f-1314b84d74cc\") " pod="openshift-dns/dns-default-t6w5h"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.802182 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thksq\" (UniqueName: \"kubernetes.io/projected/1c6c38fc-98a5-4280-ab21-f967146f3edc-kube-api-access-thksq\") pod \"machine-config-controller-84d6567774-mt7vm\" (UID: \"1c6c38fc-98a5-4280-ab21-f967146f3edc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.803313 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9v56\" (UniqueName: \"kubernetes.io/projected/f51320f2-d5d7-4d3a-a23a-efbe6290fe51-kube-api-access-b9v56\") pod \"olm-operator-6b444d44fb-v8nrr\" (UID: \"f51320f2-d5d7-4d3a-a23a-efbe6290fe51\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.805905 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvzbl\" (UniqueName: \"kubernetes.io/projected/a8582aa5-89aa-4a01-b168-22605edaf7cb-kube-api-access-xvzbl\") pod \"service-ca-9c57cc56f-kmw7r\" (UID: \"a8582aa5-89aa-4a01-b168-22605edaf7cb\") " pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.809739 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.810051 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.822678 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.843351 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp6jh\" (UniqueName: \"kubernetes.io/projected/607b13e2-3423-46e7-938c-cb7ad263e017-kube-api-access-bp6jh\") pod \"catalog-operator-68c6474976-mt9qf\" (UID: \"607b13e2-3423-46e7-938c-cb7ad263e017\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.845658 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-t6w5h"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.850538 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r655x\" (UniqueName: \"kubernetes.io/projected/4bdf9ed1-c4f3-4364-866b-ee6098476ab1-kube-api-access-r655x\") pod \"migrator-59844c95c7-298vs\" (UID: \"4bdf9ed1-c4f3-4364-866b-ee6098476ab1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.923676 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hxzm\" (UniqueName: \"kubernetes.io/projected/610b97c0-7fa1-4c6b-bfb8-2247491aae21-kube-api-access-6hxzm\") pod \"marketplace-operator-79b997595-g4hs6\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.927064 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwjsj\" (UniqueName: \"kubernetes.io/projected/d566f76d-7861-4304-b121-22fddb254188-kube-api-access-dwjsj\") pod \"multus-admission-controller-857f4d67dd-w89sw\" (UID: \"d566f76d-7861-4304-b121-22fddb254188\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.931702 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22kld\" (UniqueName: \"kubernetes.io/projected/df4eacbc-4ebc-49ec-ac29-7581f96e799b-kube-api-access-22kld\") pod \"service-ca-operator-777779d784-wksjf\" (UID: \"df4eacbc-4ebc-49ec-ac29-7581f96e799b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.936546 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.937043 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.937519 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.937822 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.945225 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m"]
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.975680 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mc647"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.994422 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vc55\" (UniqueName: \"kubernetes.io/projected/3c7843cb-a7a2-4db4-b244-f88476448291-kube-api-access-5vc55\") pod \"collect-profiles-29323995-msb78\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"
Oct 02 21:26:00 crc kubenswrapper[4636]: I1002 21:26:00.995488 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mll24\" (UniqueName: \"kubernetes.io/projected/16abd9f8-9089-4ecd-8314-06a470d37b8a-kube-api-access-mll24\") pod \"control-plane-machine-set-operator-78cbb6b69f-7t4m5\" (UID: \"16abd9f8-9089-4ecd-8314-06a470d37b8a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.024768 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.047712 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.048940 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-config\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.048988 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b92378a3-c9aa-4e73-8336-fdd168f717f0-webhook-cert\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049044 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk8hq\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-kube-api-access-wk8hq\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049074 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b92378a3-c9aa-4e73-8336-fdd168f717f0-tmpfs\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049101 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-registry-tls\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049143 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-registry-certificates\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049220 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff8dec51-9f81-4a04-867e-6a610ddf12f7-config\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049259 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-client\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049314 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff8dec51-9f81-4a04-867e-6a610ddf12f7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049380 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-trusted-ca\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049451 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/678df1e2-1565-4186-9221-80dac59e28aa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049490 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049533 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80cb5199-53bb-49db-89ff-28ada1ddec9f-serving-cert\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049578 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-ca\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.049620 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk2qc\" (UniqueName: \"kubernetes.io/projected/b92378a3-c9aa-4e73-8336-fdd168f717f0-kube-api-access-fk2qc\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"
Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.058803 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:01.558736493 +0000 UTC m=+152.881744512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.059428 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.064218 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-852rh\" (UniqueName: \"kubernetes.io/projected/80cb5199-53bb-49db-89ff-28ada1ddec9f-kube-api-access-852rh\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.064707 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/678df1e2-1565-4186-9221-80dac59e28aa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.064954 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b92378a3-c9aa-4e73-8336-fdd168f717f0-apiservice-cert\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.065283 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-bound-sa-token\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.065301 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff8dec51-9f81-4a04-867e-6a610ddf12f7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.065626 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-service-ca\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.075604 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.099971 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn"] Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.105559 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l"] Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.110392 4636 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf2410ff_f182_4416_ba76_49b77dfcce3a.slice/crio-e6fb00729f35a9823c5e07589e066dd0f4d1e230a650b72504bcf4578291a7e6.scope\": RecentStats: unable to find data in memory cache]" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.115017 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" Oct 02 21:26:01 crc kubenswrapper[4636]: W1002 21:26:01.119596 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a4a1b44_649f_4956_81fc_8be2fb503d7b.slice/crio-097fa491d71bd541ffc6c91fd5d4340a1be061389e2d8adaa7d407d79336e759 WatchSource:0}: Error finding container 097fa491d71bd541ffc6c91fd5d4340a1be061389e2d8adaa7d407d79336e759: Status 404 returned error can't find the container with id 097fa491d71bd541ffc6c91fd5d4340a1be061389e2d8adaa7d407d79336e759 Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.130354 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.133265 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lnls5"] Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.138314 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-497mz"] Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.138587 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.168488 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.168694 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-plugins-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.169509 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-02 21:26:01.669413087 +0000 UTC m=+152.992421146 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.169988 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-ca\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.170563 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-ca\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.172919 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk2qc\" (UniqueName: \"kubernetes.io/projected/b92378a3-c9aa-4e73-8336-fdd168f717f0-kube-api-access-fk2qc\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.173386 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-csi-data-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.179130 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-852rh\" (UniqueName: \"kubernetes.io/projected/80cb5199-53bb-49db-89ff-28ada1ddec9f-kube-api-access-852rh\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.179361 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/678df1e2-1565-4186-9221-80dac59e28aa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.180243 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-registration-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.180133 4636 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/678df1e2-1565-4186-9221-80dac59e28aa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.180492 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b92378a3-c9aa-4e73-8336-fdd168f717f0-apiservice-cert\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.181324 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dbebd2af-1a31-45fc-a249-59bbfbea6d85-cert\") pod \"ingress-canary-5r2lm\" (UID: \"dbebd2af-1a31-45fc-a249-59bbfbea6d85\") " pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.181693 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvtcs\" (UniqueName: \"kubernetes.io/projected/dbebd2af-1a31-45fc-a249-59bbfbea6d85-kube-api-access-qvtcs\") pod \"ingress-canary-5r2lm\" (UID: \"dbebd2af-1a31-45fc-a249-59bbfbea6d85\") " pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.183589 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-socket-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.183879 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-certs\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.183985 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-bound-sa-token\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184064 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff8dec51-9f81-4a04-867e-6a610ddf12f7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184194 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-service-ca\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184285 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-config\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184359 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b92378a3-c9aa-4e73-8336-fdd168f717f0-webhook-cert\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184473 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk8hq\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-kube-api-access-wk8hq\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184546 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b92378a3-c9aa-4e73-8336-fdd168f717f0-tmpfs\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.184632 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-registry-tls\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.186194 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx2rc\" (UniqueName: \"kubernetes.io/projected/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-kube-api-access-mx2rc\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.186338 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-registry-certificates\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.186450 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-mountpoint-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.186810 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ff8dec51-9f81-4a04-867e-6a610ddf12f7-config\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.186892 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95wbb\" (UniqueName: \"kubernetes.io/projected/1c1a896f-2199-458b-9922-932040317faa-kube-api-access-95wbb\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.187118 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-client\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.187252 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff8dec51-9f81-4a04-867e-6a610ddf12f7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.188872 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-registry-certificates\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.202678 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b92378a3-c9aa-4e73-8336-fdd168f717f0-apiservice-cert\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.205177 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b92378a3-c9aa-4e73-8336-fdd168f717f0-tmpfs\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.210718 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff8dec51-9f81-4a04-867e-6a610ddf12f7-config\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.213181 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-service-ca\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 
21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.213805 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80cb5199-53bb-49db-89ff-28ada1ddec9f-config\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.217445 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-registry-tls\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.220905 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-trusted-ca\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.223497 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/80cb5199-53bb-49db-89ff-28ada1ddec9f-etcd-client\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.219396 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-trusted-ca\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.226091 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/678df1e2-1565-4186-9221-80dac59e28aa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.227420 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-node-bootstrap-token\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.227562 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.227985 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80cb5199-53bb-49db-89ff-28ada1ddec9f-serving-cert\") pod 
\"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.228617 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:01.728602332 +0000 UTC m=+153.051610351 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.230614 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk2qc\" (UniqueName: \"kubernetes.io/projected/b92378a3-c9aa-4e73-8336-fdd168f717f0-kube-api-access-fk2qc\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.234569 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/678df1e2-1565-4186-9221-80dac59e28aa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.238858 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff8dec51-9f81-4a04-867e-6a610ddf12f7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.244399 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b92378a3-c9aa-4e73-8336-fdd168f717f0-webhook-cert\") pod \"packageserver-d55dfcdfc-cppbr\" (UID: \"b92378a3-c9aa-4e73-8336-fdd168f717f0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.268847 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80cb5199-53bb-49db-89ff-28ada1ddec9f-serving-cert\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.295849 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-852rh\" (UniqueName: \"kubernetes.io/projected/80cb5199-53bb-49db-89ff-28ada1ddec9f-kube-api-access-852rh\") pod \"etcd-operator-b45778765-xfzw2\" (UID: \"80cb5199-53bb-49db-89ff-28ada1ddec9f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.314813 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.355945 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356191 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-certs\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356253 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx2rc\" (UniqueName: \"kubernetes.io/projected/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-kube-api-access-mx2rc\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356277 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-mountpoint-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356306 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95wbb\" (UniqueName: \"kubernetes.io/projected/1c1a896f-2199-458b-9922-932040317faa-kube-api-access-95wbb\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356343 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-node-bootstrap-token\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356381 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-plugins-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356412 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-csi-data-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356444 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-registration-dir\") pod 
\"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356469 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dbebd2af-1a31-45fc-a249-59bbfbea6d85-cert\") pod \"ingress-canary-5r2lm\" (UID: \"dbebd2af-1a31-45fc-a249-59bbfbea6d85\") " pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356495 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvtcs\" (UniqueName: \"kubernetes.io/projected/dbebd2af-1a31-45fc-a249-59bbfbea6d85-kube-api-access-qvtcs\") pod \"ingress-canary-5r2lm\" (UID: \"dbebd2af-1a31-45fc-a249-59bbfbea6d85\") " pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.356518 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-socket-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.360492 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff8dec51-9f81-4a04-867e-6a610ddf12f7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7br4x\" (UID: \"ff8dec51-9f81-4a04-867e-6a610ddf12f7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.364375 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-node-bootstrap-token\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.364563 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:01.864535512 +0000 UTC m=+153.187543521 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.369359 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-certs\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.369879 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-mountpoint-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.370794 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk8hq\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-kube-api-access-wk8hq\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.370874 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-registration-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.370919 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-plugins-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.370983 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-csi-data-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.376626 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.388242 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.390921 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1c1a896f-2199-458b-9922-932040317faa-socket-dir\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.392810 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95wbb\" (UniqueName: \"kubernetes.io/projected/1c1a896f-2199-458b-9922-932040317faa-kube-api-access-95wbb\") pod \"csi-hostpathplugin-qj2jd\" (UID: \"1c1a896f-2199-458b-9922-932040317faa\") " pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.411619 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx2rc\" (UniqueName: \"kubernetes.io/projected/afe5eca4-cf62-4af8-a0b9-278fe98dd6b8-kube-api-access-mx2rc\") pod \"machine-config-server-zkhsq\" (UID: \"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8\") " pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.470797 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-bound-sa-token\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.471111 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.471483 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:01.971466501 +0000 UTC m=+153.294474520 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.478674 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dbebd2af-1a31-45fc-a249-59bbfbea6d85-cert\") pod \"ingress-canary-5r2lm\" (UID: \"dbebd2af-1a31-45fc-a249-59bbfbea6d85\") " pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.481028 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.481297 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zkhsq" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.497593 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvtcs\" (UniqueName: \"kubernetes.io/projected/dbebd2af-1a31-45fc-a249-59bbfbea6d85-kube-api-access-qvtcs\") pod \"ingress-canary-5r2lm\" (UID: \"dbebd2af-1a31-45fc-a249-59bbfbea6d85\") " pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.559034 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5r2lm" Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.562099 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" event={"ID":"b599dd60-130f-4b26-b87d-7df6b14d2d7e","Type":"ContainerStarted","Data":"9a736331a65b122ae7ab81e1a73e0f01c2ae449ef296a2f895dfcb7ab19824f2"} Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.572154 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.572406 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.072392173 +0000 UTC m=+153.395400192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.572422 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-t6w5h"] Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.580845 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" event={"ID":"776d4066-e52b-45f4-8d1c-eaad48feabc9","Type":"ContainerStarted","Data":"c62e18b2e27bc31fa41f3c89441137baa4f350ee9798b84dc82a6a8bc345b082"} Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.655450 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" event={"ID":"d53d9154-d213-435c-9b1c-ae798dcfc3e9","Type":"ContainerStarted","Data":"914dcc4e949e71f01fb72080f249f4cd4cb248a1e5be910882d74196c9a76910"} Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.673139 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr"] Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.673862 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.675765 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.175738742 +0000 UTC m=+153.498746761 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.687134 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" event={"ID":"878328eb-ad2c-4cd7-aff4-19d12588b46d","Type":"ContainerStarted","Data":"2ffe92f0d107e2965f95f3d1fa067c314a67a93d5e0c4d09b6feaea7adf455d6"} Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.742713 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wdwwc" event={"ID":"b4ddd281-18ba-41ec-b5a6-788b9f5a942e","Type":"ContainerStarted","Data":"b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3"} Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.775519 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.776665 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr"] Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.776856 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.276838928 +0000 UTC m=+153.599846947 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.802894 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" event={"ID":"9a4a1b44-649f-4956-81fc-8be2fb503d7b","Type":"ContainerStarted","Data":"097fa491d71bd541ffc6c91fd5d4340a1be061389e2d8adaa7d407d79336e759"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.859909 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm"]
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.871983 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" event={"ID":"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611","Type":"ContainerStarted","Data":"ee3f3d2c27db9ea86a12651883dc9ffac9ad78eb9eef024e38cd01842ba8ef69"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.876874 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.879126 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.379109997 +0000 UTC m=+153.702118016 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.891645 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" event={"ID":"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023","Type":"ContainerStarted","Data":"27dd119be45c729f0ef1f9d544530e694fb5d6c69d73bc22d8b374af29c72189"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.904377 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" event={"ID":"31f15e24-8b0d-4f12-b71f-29565e66b0ca","Type":"ContainerStarted","Data":"67ab024280a9e12fd627971e568b4717c624bc6c74ce53cb95673bfef87a4a33"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.925492 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mc647"]
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.926172 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" event={"ID":"b2766a44-24c7-48ff-943f-3a225eb74dec","Type":"ContainerStarted","Data":"51834171d1cf69c63339c42bda8a6f1ed12f45ef9fa3e94ed419be3505f2d7d1"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.929551 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-497mz" event={"ID":"14baf1ce-9a21-4232-a0b0-3ad606d31e45","Type":"ContainerStarted","Data":"bd43a5dabd8dc9c12cfdbab82151aa55cfb2bec843cce9afba1fb2bf3ee36174"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.953183 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q"]
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.964372 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" event={"ID":"6252e8b9-5b10-4dbd-9f02-50e0a5e47233","Type":"ContainerStarted","Data":"aaf77fb1c3f589600c4cbc9b16d3935f7854e70900f2d50ed2b5b09e6f13e264"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.970630 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n26j2" event={"ID":"4dec7d6d-2309-486c-bebe-19dce69f40d8","Type":"ContainerStarted","Data":"b9904a7732ab69827a5ae0e169ac64349ddc67f98df6d2c6c969c270703be746"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.971912 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-n26j2"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.977724 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-gbfdn" event={"ID":"5392cef5-bba8-474f-a355-a505f056b6a5","Type":"ContainerStarted","Data":"f6e2da0848e43f3935c1d3bb766b12a8e9130ff2a8dd6cb409166f910cec7aef"}
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.978192 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:01 crc kubenswrapper[4636]: E1002 21:26:01.978972 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.478946528 +0000 UTC m=+153.801954547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.979787 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.979831 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.981035 4636 generic.go:334] "Generic (PLEG): container finished" podID="cf2410ff-f182-4416-ba76-49b77dfcce3a" containerID="e6fb00729f35a9823c5e07589e066dd0f4d1e230a650b72504bcf4578291a7e6" exitCode=0
Oct 02 21:26:01 crc kubenswrapper[4636]: I1002 21:26:01.981163 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" event={"ID":"cf2410ff-f182-4416-ba76-49b77dfcce3a","Type":"ContainerDied","Data":"e6fb00729f35a9823c5e07589e066dd0f4d1e230a650b72504bcf4578291a7e6"}
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.029675 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kmw7r"]
Oct 02 21:26:02 crc kubenswrapper[4636]: W1002 21:26:02.096805 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod700f2044_4ee9_4adb_ab32_f41e84362b15.slice/crio-8f7ab9d683f992b52c349c4c1d8926d0a331cd02613cc0925d6433106e424e37 WatchSource:0}: Error finding container 8f7ab9d683f992b52c349c4c1d8926d0a331cd02613cc0925d6433106e424e37: Status 404 returned error can't find the container with id 8f7ab9d683f992b52c349c4c1d8926d0a331cd02613cc0925d6433106e424e37
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.098986 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.109896 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" event={"ID":"458c1259-882d-4c4c-a4b6-e2c93bb9fe70","Type":"ContainerStarted","Data":"4eb3d6674ad3ad3b2db53c501eabd8826c64920a983140aea544aec401ac0537"}
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.120126 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.620100625 +0000 UTC m=+153.943108634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.122741 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" event={"ID":"d6faf1e4-6165-479e-95a2-25e37852f252","Type":"ContainerStarted","Data":"8e52ca2d47d438cb346a784135a2e4288c9d8340d2c701cdd79f3036ac31cf04"}
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.144675 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-gbfdn"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.145273 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.145311 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.150883 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf"]
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.227102 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.229592 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.729562485 +0000 UTC m=+154.052570504 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.264214 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" event={"ID":"e3d86ad9-1a43-4146-aa2b-13a70e8acccb","Type":"ContainerStarted","Data":"94d0b76623374ec23bdb06baf870b44b0ee26990b8ffaf9ba9ad5ccaf49ebb63"}
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.264810 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.274798 4636 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rklgn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.274849 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" podUID="ffb6abfd-3376-4908-b388-7c398e36f986" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.276324 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tkzhx" podStartSLOduration=132.276303352 podStartE2EDuration="2m12.276303352s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:02.274173392 +0000 UTC m=+153.597181411" watchObservedRunningTime="2025-10-02 21:26:02.276303352 +0000 UTC m=+153.599311371"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.302501 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d"]
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.329393 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.329852 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.829836628 +0000 UTC m=+154.152844647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.405272 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-wdwwc" podStartSLOduration=132.405250206 podStartE2EDuration="2m12.405250206s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:02.402494959 +0000 UTC m=+153.725502978" watchObservedRunningTime="2025-10-02 21:26:02.405250206 +0000 UTC m=+153.728258225"
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.432534 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.437028 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:02.937005474 +0000 UTC m=+154.260013493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.542070 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.543069 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.043057139 +0000 UTC m=+154.366065158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.648649 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.649139 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.149118114 +0000 UTC m=+154.472126133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.654189 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"]
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.762062 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.764277 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.264257393 +0000 UTC m=+154.587265412 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.875040 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.875155 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.375120902 +0000 UTC m=+154.698128921 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.875361 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.875795 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.37577455 +0000 UTC m=+154.698782569 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.960707 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w89sw"]
Oct 02 21:26:02 crc kubenswrapper[4636]: I1002 21:26:02.977536 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:02 crc kubenswrapper[4636]: E1002 21:26:02.989227 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.489127889 +0000 UTC m=+154.812135908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.000772 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5"]
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.035559 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs"]
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.088030 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.089991 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.589977439 +0000 UTC m=+154.912985458 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.121692 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" podStartSLOduration=133.121662555 podStartE2EDuration="2m13.121662555s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.090156794 +0000 UTC m=+154.413164813" watchObservedRunningTime="2025-10-02 21:26:03.121662555 +0000 UTC m=+154.444670574"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.124089 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wksjf"]
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.150866 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:03 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:03 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:03 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.151384 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.191595 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.192103 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.692086744 +0000 UTC m=+155.015094763 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.220672 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4hs6"]
Oct 02 21:26:03 crc kubenswrapper[4636]: W1002 21:26:03.293640 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bdf9ed1_c4f3_4364_866b_ee6098476ab1.slice/crio-d3a12a1aaf8e85c40793957ee53df8032bcc3a652d3d1a6cd1d21cbd5931d92d WatchSource:0}: Error finding container d3a12a1aaf8e85c40793957ee53df8032bcc3a652d3d1a6cd1d21cbd5931d92d: Status 404 returned error can't find the container with id d3a12a1aaf8e85c40793957ee53df8032bcc3a652d3d1a6cd1d21cbd5931d92d
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.294635 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.295106 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.795089123 +0000 UTC m=+155.118097142 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.297499 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fmtgx" podStartSLOduration=133.29747892 podStartE2EDuration="2m13.29747892s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.251148335 +0000 UTC m=+154.574156374" watchObservedRunningTime="2025-10-02 21:26:03.29747892 +0000 UTC m=+154.620486939"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.343870 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" event={"ID":"31f15e24-8b0d-4f12-b71f-29565e66b0ca","Type":"ContainerStarted","Data":"7912ca9212a688fe501d19ac35b6f2a440e0821c0d0780106ab175a3f9386197"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.352502 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" event={"ID":"607b13e2-3423-46e7-938c-cb7ad263e017","Type":"ContainerStarted","Data":"a2f6b6b174f2d41001180d460c5dd14ef46eb1580bd3c7d684620b77b8f8c120"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.394063 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" event={"ID":"a8582aa5-89aa-4a01-b168-22605edaf7cb","Type":"ContainerStarted","Data":"0ea3e24872bc4dba426863f294b52292b2e6991c77683c461e70d6133811a67e"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.396940 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-n26j2" podStartSLOduration=133.39690391 podStartE2EDuration="2m13.39690391s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.394462041 +0000 UTC m=+154.717470070" watchObservedRunningTime="2025-10-02 21:26:03.39690391 +0000 UTC m=+154.719911929"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.397729 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.398904 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:03.898886875 +0000 UTC m=+155.221894894 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.424627 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" event={"ID":"9a4a1b44-649f-4956-81fc-8be2fb503d7b","Type":"ContainerStarted","Data":"a9d3444358f9f7ffd7f07c6c2b8be7d8d674cab3d8f9968ab7354bb2ebe3a0c4"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.437275 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-gbfdn" podStartSLOduration=133.437251698 podStartE2EDuration="2m13.437251698s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.43520147 +0000 UTC m=+154.758209489" watchObservedRunningTime="2025-10-02 21:26:03.437251698 +0000 UTC m=+154.760259717"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.455166 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" event={"ID":"1c6c38fc-98a5-4280-ab21-f967146f3edc","Type":"ContainerStarted","Data":"673ec0086e287fd33cd1db596ab9bf844b00481198f3289f490cf9eac375fce3"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.471531 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" event={"ID":"f51320f2-d5d7-4d3a-a23a-efbe6290fe51","Type":"ContainerStarted","Data":"fa601b85fd6cde7035e38d944dfde4ae3d93f39fbaaeebe9e41449cba77ea7d3"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.502740 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.504378 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.004363194 +0000 UTC m=+155.327371213 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.532119 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" event={"ID":"c385fb47-f4e5-4934-a44e-6ee2caed0450","Type":"ContainerStarted","Data":"9aa8dd1402a142c3bf67ac194205389093f022a9f114b9ddefc337d77dc931f1"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.543511 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mc647" event={"ID":"97a981ff-12be-4043-afb9-d8c9fa7ef9d5","Type":"ContainerStarted","Data":"8542a428896580e94a6390b46086edc4da7daf0d339aaf9b4ee5f9e60244f49d"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.547526 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"]
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.558321 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" event={"ID":"3c7843cb-a7a2-4db4-b244-f88476448291","Type":"ContainerStarted","Data":"a1a7a83535685a0a794962bafc89f26b0a46a12a914742ac69e7f656541e67c1"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.559295 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lsk9m" podStartSLOduration=133.559269699 podStartE2EDuration="2m13.559269699s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.556841011 +0000 UTC m=+154.879849030" watchObservedRunningTime="2025-10-02 21:26:03.559269699 +0000 UTC m=+154.882277708"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.588233 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mnt7l" podStartSLOduration=133.588190696 podStartE2EDuration="2m13.588190696s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.575197523 +0000 UTC m=+154.898205532" watchObservedRunningTime="2025-10-02 21:26:03.588190696 +0000 UTC m=+154.911198715"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.598852 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" event={"ID":"b599dd60-130f-4b26-b87d-7df6b14d2d7e","Type":"ContainerStarted","Data":"2a9cfd46f1221554c0f15390a40765d44f968c984f3bca8efb346b67cd4de4ef"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.618650 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.627488 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.127454304 +0000 UTC m=+155.450462313 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.627619 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.630881 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.130824628 +0000 UTC m=+155.453832647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.644946 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" event={"ID":"eb9dd7b6-da00-4ae2-aa0f-0e8e2f1f9023","Type":"ContainerStarted","Data":"59917281ce60cf0bcd3f2aa1a5be0c60c871d4e710feee76ff9b5504b59d3e24"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.650153 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" event={"ID":"6154a418-bf83-481b-9e04-e870a95548db","Type":"ContainerStarted","Data":"e5b984d1be428fa9aea94c5d5a30c8e7676bea60ee7812a4ce08ba250dfa52c2"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.694336 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" podStartSLOduration=133.694309873 podStartE2EDuration="2m13.694309873s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.635795107 +0000 UTC m=+154.958803126" watchObservedRunningTime="2025-10-02 21:26:03.694309873 +0000 UTC m=+155.017317882"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.748013 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5r2lm"]
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.748080 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" event={"ID":"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e","Type":"ContainerStarted","Data":"1367a2d5071c48a4440a79e51f6f8e5fe16c9bfbf5142ae669557776f55145c7"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.749302 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.750841 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.250811952 +0000 UTC m=+155.573819971 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.766261 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" event={"ID":"700f2044-4ee9-4adb-ab32-f41e84362b15","Type":"ContainerStarted","Data":"8f7ab9d683f992b52c349c4c1d8926d0a331cd02613cc0925d6433106e424e37"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.766887 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzldq" podStartSLOduration=133.766861271 podStartE2EDuration="2m13.766861271s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.673345097 +0000 UTC m=+154.996353126" watchObservedRunningTime="2025-10-02 21:26:03.766861271 +0000 UTC m=+155.089869290"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.769628 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xfzw2"]
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.803844 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5"
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.804184 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5"
Oct 02 21:26:03 crc kubenswrapper[4636]: W1002 21:26:03.819898 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb92378a3_c9aa_4e73_8336_fdd168f717f0.slice/crio-1bd131efb464e0f7e43d8e2e3f6853b42a9968c2a3ed69d5d60b93c8b67a8bd1 WatchSource:0}: Error finding container 1bd131efb464e0f7e43d8e2e3f6853b42a9968c2a3ed69d5d60b93c8b67a8bd1: Status 404 returned error can't find the container with id 1bd131efb464e0f7e43d8e2e3f6853b42a9968c2a3ed69d5d60b93c8b67a8bd1
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.842087 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" event={"ID":"d566f76d-7861-4304-b121-22fddb254188","Type":"ContainerStarted","Data":"d053fb98a60d9068e2a7227c22bff27b4db3ff488538c5ae3ffd6f898d8af88f"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.852256 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.852662 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.35264601 +0000 UTC m=+155.675654029 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.948088 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" event={"ID":"6252e8b9-5b10-4dbd-9f02-50e0a5e47233","Type":"ContainerStarted","Data":"ccebc42ea20619b2b5607b4fa816b009d888e6a76e25e82aaa81396a21192835"}
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.956233 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:03 crc kubenswrapper[4636]: E1002 21:26:03.956595 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.456579595 +0000 UTC m=+155.779587614 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:03 crc kubenswrapper[4636]: I1002 21:26:03.974701 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zkhsq" event={"ID":"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8","Type":"ContainerStarted","Data":"cbcfb4d07954b052dc2523f7a11540f9fa833eb517e399b1cfe7fac9ce19eb7f"}
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:03.999090 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" event={"ID":"b2766a44-24c7-48ff-943f-3a225eb74dec","Type":"ContainerStarted","Data":"c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b"}
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.000445 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.020254 4636 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9759p container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.020308 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.040055 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t6w5h" event={"ID":"67eaa966-b8bc-427c-a47f-1314b84d74cc","Type":"ContainerStarted","Data":"51fbac0f12cfd1c5f8f2d71af52d42a57d2dea6b70db2a46de1a5b98fb61188b"}
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.062307 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.062569 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" event={"ID":"16abd9f8-9089-4ecd-8314-06a470d37b8a","Type":"ContainerStarted","Data":"c0e362fb8a9f4a24043f3babfb52903701d5a5aaf468da2a5f11d150e6ee4954"}
Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.062617 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.562606029 +0000 UTC m=+155.885614048 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.068421 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" event={"ID":"776d4066-e52b-45f4-8d1c-eaad48feabc9","Type":"ContainerStarted","Data":"5d517207bac2087b18bf8dd4464f0b4fe8ab8a009301b9fa1aa8e5967733fa08"}
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.069497 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.079836 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-84tsd" podStartSLOduration=134.07981646 podStartE2EDuration="2m14.07981646s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:04.069960715 +0000 UTC m=+155.392968734" watchObservedRunningTime="2025-10-02 21:26:04.07981646 +0000 UTC m=+155.402824479"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.080413 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dhfb5" podStartSLOduration=134.080406747 podStartE2EDuration="2m14.080406747s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:03.777509549 +0000 UTC m=+155.100517568" watchObservedRunningTime="2025-10-02 21:26:04.080406747 +0000 UTC m=+155.403414766"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.084567 4636 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-76lrr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" start-of-body=
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.084667 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.104111 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x"]
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.128570 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qj2jd"]
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.134916 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" event={"ID":"d6faf1e4-6165-479e-95a2-25e37852f252","Type":"ContainerStarted","Data":"92c750b232b7328e93934403a7c8c5c04281b953a88133ed7b0c8d76a6e9a06c"}
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.140914 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" event={"ID":"d53d9154-d213-435c-9b1c-ae798dcfc3e9","Type":"ContainerStarted","Data":"0b35bb6d0f8d9ad245ca655b266dbecbb6d1f4d0e154e9cf1fae22a65061c775"}
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.141532 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.141568 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.153136 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:04 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:04 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:04 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.153184 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.162492 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" podStartSLOduration=134.162468431 podStartE2EDuration="2m14.162468431s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:04.150210538 +0000 UTC m=+155.473218557" watchObservedRunningTime="2025-10-02 21:26:04.162468431 +0000 UTC m=+155.485476450"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.174087 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.175554 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.675535466 +0000 UTC m=+155.998543485 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.192852 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.210612 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" podStartSLOduration=133.210583026 podStartE2EDuration="2m13.210583026s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:04.204395893 +0000 UTC m=+155.527403912" watchObservedRunningTime="2025-10-02 21:26:04.210583026 +0000 UTC m=+155.533591045"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.259334 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6fxnn" podStartSLOduration=134.259313628 podStartE2EDuration="2m14.259313628s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:04.258407433 +0000 UTC m=+155.581415462" watchObservedRunningTime="2025-10-02 21:26:04.259313628 +0000 UTC m=+155.582321647"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.283184 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.285625 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.785611213 +0000 UTC m=+156.108619452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:04 crc kubenswrapper[4636]: W1002 21:26:04.298842 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff8dec51_9f81_4a04_867e_6a610ddf12f7.slice/crio-575b310dc581b60193759c9ef144931d6e5d6b64d9f8d3e8405b210a0223e5d1 WatchSource:0}: Error finding container 575b310dc581b60193759c9ef144931d6e5d6b64d9f8d3e8405b210a0223e5d1: Status 404 returned error can't find the container with id 575b310dc581b60193759c9ef144931d6e5d6b64d9f8d3e8405b210a0223e5d1
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.346956 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bdjjn" podStartSLOduration=134.346935138 podStartE2EDuration="2m14.346935138s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:04.344926582 +0000 UTC m=+155.667934601" watchObservedRunningTime="2025-10-02 21:26:04.346935138 +0000 UTC m=+155.669943157"
Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.394059 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.394642 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.894618711 +0000 UTC m=+156.217626730 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.495365 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.495663 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:04.995650975 +0000 UTC m=+156.318658994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.597854 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.597981 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.097958346 +0000 UTC m=+156.420966365 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.598173 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.598527 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.098515021 +0000 UTC m=+156.421523030 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.699141 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.699584 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.199561686 +0000 UTC m=+156.522569705 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.801078 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.801479 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.301459335 +0000 UTC m=+156.624467354 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:04 crc kubenswrapper[4636]: I1002 21:26:04.902781 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:04 crc kubenswrapper[4636]: E1002 21:26:04.903512 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.403495627 +0000 UTC m=+156.726503646 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.004559 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.004952 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.504935173 +0000 UTC m=+156.827943192 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.079614 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.105933 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.106421 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.60640395 +0000 UTC m=+156.929411969 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.145275 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:05 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:05 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:05 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.145339 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.154660 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" event={"ID":"607b13e2-3423-46e7-938c-cb7ad263e017","Type":"ContainerStarted","Data":"e22f86ff5d3f9e4e53b1966534640009bc4c4495a726094330608cf44285d55a"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.161679 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" event={"ID":"1c6c38fc-98a5-4280-ab21-f967146f3edc","Type":"ContainerStarted","Data":"960a3b003b5370f00816fb5f7c1b9358a7505afc6b6b1954d4e6fa2026ddd0c9"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.166186 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" event={"ID":"4bdf9ed1-c4f3-4364-866b-ee6098476ab1","Type":"ContainerStarted","Data":"d3a12a1aaf8e85c40793957ee53df8032bcc3a652d3d1a6cd1d21cbd5931d92d"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.175233 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mc647" event={"ID":"97a981ff-12be-4043-afb9-d8c9fa7ef9d5","Type":"ContainerStarted","Data":"051389652d556bb006234def47769e4e63cce3f2205bb68cb896a6935bfc6752"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.176355 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" event={"ID":"610b97c0-7fa1-4c6b-bfb8-2247491aae21","Type":"ContainerStarted","Data":"84b48518e05875738e5060078a7f07db653cc8d27b109c0b49ad94d64e25bb38"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.177699 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" event={"ID":"6154a418-bf83-481b-9e04-e870a95548db","Type":"ContainerStarted","Data":"83222b28ec1a06093015c3852f50c09461033ef1782576c0dd92ef08bf612509"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.179982 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-497mz" 
event={"ID":"14baf1ce-9a21-4232-a0b0-3ad606d31e45","Type":"ContainerStarted","Data":"a9b2a5ff3c112fe0a702acd4b21cc8d94c3214c62f7352d6723f8bc6c70e790e"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.180224 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.183273 4636 patch_prober.go:28] interesting pod/console-operator-58897d9998-497mz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.183313 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-497mz" podUID="14baf1ce-9a21-4232-a0b0-3ad606d31e45" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.184279 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" event={"ID":"cf2410ff-f182-4416-ba76-49b77dfcce3a","Type":"ContainerStarted","Data":"59e55aef5e9bb751e0c33abd49b9bf0f7f73f00ceedac49406c70ebbd846c59c"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.184493 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.190508 4636 generic.go:334] "Generic (PLEG): container finished" podID="e3d86ad9-1a43-4146-aa2b-13a70e8acccb" containerID="405cfcc839c11b0c79ff987501dde52400304aaf8f8780147b56188adf94af88" exitCode=0 Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.190970 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" event={"ID":"e3d86ad9-1a43-4146-aa2b-13a70e8acccb","Type":"ContainerDied","Data":"405cfcc839c11b0c79ff987501dde52400304aaf8f8780147b56188adf94af88"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.194000 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" event={"ID":"80cb5199-53bb-49db-89ff-28ada1ddec9f","Type":"ContainerStarted","Data":"0ee9e69a475a3e44ad318eabd7cc79caa4a1fa0eecde1f469614faa3341376c2"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.196594 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" event={"ID":"df4eacbc-4ebc-49ec-ac29-7581f96e799b","Type":"ContainerStarted","Data":"90065dea453acf73f61b0d8be8ff6e0cdec13d3ed7a1103adf2c43b7a846a717"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.197636 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zr76q" podStartSLOduration=135.19762522 podStartE2EDuration="2m15.19762522s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:05.193127664 +0000 UTC m=+156.516135683" watchObservedRunningTime="2025-10-02 21:26:05.19762522 +0000 UTC m=+156.520633239" Oct 02 21:26:05 crc
kubenswrapper[4636]: I1002 21:26:05.200630 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" event={"ID":"f51320f2-d5d7-4d3a-a23a-efbe6290fe51","Type":"ContainerStarted","Data":"aa9e052103122253dda86c2d8513c3402b7aeea2267835a3858016eaa5545cca"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.201454 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.202504 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" event={"ID":"b92378a3-c9aa-4e73-8336-fdd168f717f0","Type":"ContainerStarted","Data":"1bd131efb464e0f7e43d8e2e3f6853b42a9968c2a3ed69d5d60b93c8b67a8bd1"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.203342 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5r2lm" event={"ID":"dbebd2af-1a31-45fc-a249-59bbfbea6d85","Type":"ContainerStarted","Data":"f24686e6c4087cbe03717b563faf08d67767b28a154bae78937dfcaa273302d3"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.203619 4636 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-v8nrr container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.203657 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" podUID="f51320f2-d5d7-4d3a-a23a-efbe6290fe51" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.204581 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" event={"ID":"700f2044-4ee9-4adb-ab32-f41e84362b15","Type":"ContainerStarted","Data":"e94d561de7701b12a57282c06b98318d44e80ba753420f3e5010b4807b8cc3a7"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.208243 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.208608 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.708596537 +0000 UTC m=+157.031604556 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.208893 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" event={"ID":"1c1a896f-2199-458b-9922-932040317faa","Type":"ContainerStarted","Data":"91b5b5982e2bed7351e2a011f6dfc03e5ed8d3e7387faef58e4639437b229d0d"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.209699 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" event={"ID":"ff8dec51-9f81-4a04-867e-6a610ddf12f7","Type":"ContainerStarted","Data":"575b310dc581b60193759c9ef144931d6e5d6b64d9f8d3e8405b210a0223e5d1"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.211289 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" event={"ID":"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611","Type":"ContainerStarted","Data":"85785d28ab86b0356f2e706fd94086600aa773ad5dc84a6d6b54589b26c6f874"} Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.212622 4636 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9759p container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.212661 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.212906 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.212979 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.214201 4636 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-76lrr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" start-of-body= Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.214229 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" 
containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.238122 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" podStartSLOduration=135.238104222 podStartE2EDuration="2m15.238104222s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:05.235313744 +0000 UTC m=+156.558321783" watchObservedRunningTime="2025-10-02 21:26:05.238104222 +0000 UTC m=+156.561112251" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.240396 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-n8ql5" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.312690 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.315168 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:05.815146905 +0000 UTC m=+157.138154934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.359989 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-497mz" podStartSLOduration=135.359964128 podStartE2EDuration="2m15.359964128s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:05.339075814 +0000 UTC m=+156.662083833" watchObservedRunningTime="2025-10-02 21:26:05.359964128 +0000 UTC m=+156.682972147" Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.421499 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.421800 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-02 21:26:05.921784127 +0000 UTC m=+157.244792146 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.528908 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.529231 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.02921838 +0000 UTC m=+157.352226399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.633656 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.634114 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.134098842 +0000 UTC m=+157.457106851 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.734686 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.735066 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.235050364 +0000 UTC m=+157.558058383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.836037 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.836403 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.336389458 +0000 UTC m=+157.659397477 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:05 crc kubenswrapper[4636]: I1002 21:26:05.937272 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:05 crc kubenswrapper[4636]: E1002 21:26:05.937986 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.437970037 +0000 UTC m=+157.760978046 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.039416 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.039773 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.539760103 +0000 UTC m=+157.862768122 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.140502 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.141230 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.641213209 +0000 UTC m=+157.964221228 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.147102 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:06 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:06 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:06 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.147158 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.222633 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" event={"ID":"16abd9f8-9089-4ecd-8314-06a470d37b8a","Type":"ContainerStarted","Data":"6697aafbcf00cc2b746b7ca937c06db4c4c627aa007dab31419fff627c6eb0a7"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.229137 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" event={"ID":"1c0a8f40-cffd-4cac-b2c5-4d76fa89f611","Type":"ContainerStarted","Data":"ca4025bdf73e4abbb940cfa87f4d6d8e5b41d52930e18644fedd0190de616309"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.233559 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" event={"ID":"e3d86ad9-1a43-4146-aa2b-13a70e8acccb","Type":"ContainerStarted","Data":"818838328aaf2e9fa3891477cb927fdc9c5f5b03befa97dabea07994659fa096"} 
Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.236050 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" event={"ID":"700f2044-4ee9-4adb-ab32-f41e84362b15","Type":"ContainerStarted","Data":"8e23eff1c073f746e04b43c8222ae5f7034d706803c4497ba93b64783fb4a582"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.241634 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.242013 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.741999287 +0000 UTC m=+158.065007306 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.249021 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" podStartSLOduration=135.249001693 podStartE2EDuration="2m15.249001693s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:05.479441849 +0000 UTC m=+156.802449868" watchObservedRunningTime="2025-10-02 21:26:06.249001693 +0000 UTC m=+157.572009712" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.249281 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7t4m5" podStartSLOduration=136.24927585 podStartE2EDuration="2m16.24927585s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.248686734 +0000 UTC m=+157.571694783" watchObservedRunningTime="2025-10-02 21:26:06.24927585 +0000 UTC m=+157.572283869" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.261921 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" event={"ID":"a8582aa5-89aa-4a01-b168-22605edaf7cb","Type":"ContainerStarted","Data":"bd50c386a5db297500f59eaf55f9e5064f5018189c6d39d7e6e67f7151e0e0a1"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.266222 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" event={"ID":"4bdf9ed1-c4f3-4364-866b-ee6098476ab1","Type":"ContainerStarted","Data":"dd0005195879036562ac3b736baa05b779c461fcda4845ca6edcc6086061b5d1"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 
21:26:06.276080 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" event={"ID":"ff8dec51-9f81-4a04-867e-6a610ddf12f7","Type":"ContainerStarted","Data":"f4a3727c77c221ff874f866d9cf81df8c18c5536c6453c71691e731b7200b6eb"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.289514 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" event={"ID":"3c7843cb-a7a2-4db4-b244-f88476448291","Type":"ContainerStarted","Data":"31365e2b97feab8dab1ff10cffd56c074b257ed7b231468afb2391c66cc4ec0c"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.296645 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jc6sn" podStartSLOduration=136.296627394 podStartE2EDuration="2m16.296627394s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.296034037 +0000 UTC m=+157.619042056" watchObservedRunningTime="2025-10-02 21:26:06.296627394 +0000 UTC m=+157.619635403" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.299035 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" event={"ID":"d566f76d-7861-4304-b121-22fddb254188","Type":"ContainerStarted","Data":"18ffe0a0baa38f6675021a35f89b67de03bb11ca6623c304c7e34a44c19289c8"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.312066 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" event={"ID":"6fe85c5e-41ae-4c6a-ad16-21f46ccee66e","Type":"ContainerStarted","Data":"692ec9b68c651c628afb673cce9e878e863593dbdd5a001e30ebca12cd909d7b"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.321347 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" event={"ID":"df4eacbc-4ebc-49ec-ac29-7581f96e799b","Type":"ContainerStarted","Data":"2abb08f52c0586170e41f1303e0cf51e6b947b5b7b24959566da0567d4f4ebd3"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.332216 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" event={"ID":"610b97c0-7fa1-4c6b-bfb8-2247491aae21","Type":"ContainerStarted","Data":"0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.341531 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t6w5h" event={"ID":"67eaa966-b8bc-427c-a47f-1314b84d74cc","Type":"ContainerStarted","Data":"9ab1c45b1729365706f0398e5e6ded537664ffdd868d1fdc85a12a93ce3cd9d1"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.342203 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.343644 4636 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.843626168 +0000 UTC m=+158.166634187 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.359787 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" event={"ID":"80cb5199-53bb-49db-89ff-28ada1ddec9f","Type":"ContainerStarted","Data":"376a9d5fa3ba03f25ae1f0183c164aa1a4c917d66658c6cdd98abfa64dfc55e3"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.360897 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-kmw7r" podStartSLOduration=135.36088749 podStartE2EDuration="2m15.36088749s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.359483871 +0000 UTC m=+157.682491890" watchObservedRunningTime="2025-10-02 21:26:06.36088749 +0000 UTC m=+157.683895509" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.361919 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" podStartSLOduration=136.361914819 podStartE2EDuration="2m16.361914819s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.321090538 +0000 UTC m=+157.644098547" watchObservedRunningTime="2025-10-02 21:26:06.361914819 +0000 UTC m=+157.684922838" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.363296 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" event={"ID":"1c6c38fc-98a5-4280-ab21-f967146f3edc","Type":"ContainerStarted","Data":"12bec25b4a2f17aeea229242d634ed8f470d97b127d1e8bd3b0903bef979ae88"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.365518 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zkhsq" event={"ID":"afe5eca4-cf62-4af8-a0b9-278fe98dd6b8","Type":"ContainerStarted","Data":"2ab53dd53c3976f041c6a59d1dc024f2d85c268fce3d1591acb71ebb0d783a45"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.368590 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" event={"ID":"b92378a3-c9aa-4e73-8336-fdd168f717f0","Type":"ContainerStarted","Data":"ff3648df7eb2759e0fb9aba69c7188e1bfe425025f03ade3fedfcdda6aa81483"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.371353 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5r2lm" 
event={"ID":"dbebd2af-1a31-45fc-a249-59bbfbea6d85","Type":"ContainerStarted","Data":"5a11c86b21281e53360bede3b921b1ac67db724d740581d5d10e99097d69d57e"} Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372103 4636 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-v8nrr container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372146 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" podUID="f51320f2-d5d7-4d3a-a23a-efbe6290fe51" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372492 4636 patch_prober.go:28] interesting pod/console-operator-58897d9998-497mz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372511 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-497mz" podUID="14baf1ce-9a21-4232-a0b0-3ad606d31e45" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372533 4636 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-76lrr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" start-of-body= Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372581 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.28:6443/healthz\": dial tcp 10.217.0.28:6443: connect: connection refused" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.372986 4636 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9759p container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.373008 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.373058 4636 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-cm9hg container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" 
start-of-body= Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.373072 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" podUID="cf2410ff-f182-4416-ba76-49b77dfcce3a" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.443729 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.445700 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:06.945683111 +0000 UTC m=+158.268691130 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.507623 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wksjf" podStartSLOduration=135.507600502 podStartE2EDuration="2m15.507600502s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.466871103 +0000 UTC m=+157.789879122" watchObservedRunningTime="2025-10-02 21:26:06.507600502 +0000 UTC m=+157.830608521" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.510017 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bsk9d" podStartSLOduration=136.510005909 podStartE2EDuration="2m16.510005909s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.507270483 +0000 UTC m=+157.830278502" watchObservedRunningTime="2025-10-02 21:26:06.510005909 +0000 UTC m=+157.833013928" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.544380 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-xfzw2" podStartSLOduration=136.54436483 podStartE2EDuration="2m16.54436483s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.542551289 +0000 UTC m=+157.865559558" watchObservedRunningTime="2025-10-02 21:26:06.54436483 +0000 UTC m=+157.867372849" Oct 02 21:26:06 crc 
kubenswrapper[4636]: I1002 21:26:06.544982 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.546224 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.046209021 +0000 UTC m=+158.369217040 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.611520 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-zkhsq" podStartSLOduration=8.611502227 podStartE2EDuration="8.611502227s" podCreationTimestamp="2025-10-02 21:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.610429557 +0000 UTC m=+157.933437576" watchObservedRunningTime="2025-10-02 21:26:06.611502227 +0000 UTC m=+157.934510246" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.611699 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" podStartSLOduration=135.611694242 podStartE2EDuration="2m15.611694242s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.589137311 +0000 UTC m=+157.912145320" watchObservedRunningTime="2025-10-02 21:26:06.611694242 +0000 UTC m=+157.934702261" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.635586 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-5r2lm" podStartSLOduration=8.63557302 podStartE2EDuration="8.63557302s" podCreationTimestamp="2025-10-02 21:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.635131377 +0000 UTC m=+157.958139416" watchObservedRunningTime="2025-10-02 21:26:06.63557302 +0000 UTC m=+157.958581039" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.646557 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.646870 4636 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.146858985 +0000 UTC m=+158.469867004 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.666262 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mt7vm" podStartSLOduration=136.666235447 podStartE2EDuration="2m16.666235447s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.658063408 +0000 UTC m=+157.981071437" watchObservedRunningTime="2025-10-02 21:26:06.666235447 +0000 UTC m=+157.989243466" Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.747153 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.747301 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.247283013 +0000 UTC m=+158.570291032 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.747432 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.747720 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.247712045 +0000 UTC m=+158.570720064 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.848435 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.848653 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.348611995 +0000 UTC m=+158.671620014 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.849028 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.849384 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.349374987 +0000 UTC m=+158.672383006 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.949700 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.949893 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.449865166 +0000 UTC m=+158.772873185 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:06 crc kubenswrapper[4636]: I1002 21:26:06.949999 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:06 crc kubenswrapper[4636]: E1002 21:26:06.950316 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.450306369 +0000 UTC m=+158.773314388 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.051342 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.051531 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.551505148 +0000 UTC m=+158.874513167 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.051647 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.051938 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.551925039 +0000 UTC m=+158.874933058 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.142597 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:07 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:07 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:07 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.142665 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.152721 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.153082 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.653054546 +0000 UTC m=+158.976062565 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.153269 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.153613 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.653602651 +0000 UTC m=+158.976610770 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.254087 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.254256 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.754227614 +0000 UTC m=+159.077235643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.254309 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.254804 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.754724578 +0000 UTC m=+159.077732617 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.355532 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.355887 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.855851315 +0000 UTC m=+159.178859344 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.380414 4636 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-v8nrr container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.380465 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" podUID="f51320f2-d5d7-4d3a-a23a-efbe6290fe51" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.381669 4636 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cppbr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" start-of-body= Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.381682 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.381737 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.381694 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" podUID="b92378a3-c9aa-4e73-8336-fdd168f717f0" containerName="packageserver" probeResult="failure" output="Get 
\"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.401060 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" podStartSLOduration=136.401043709 podStartE2EDuration="2m16.401043709s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:06.690829404 +0000 UTC m=+158.013837443" watchObservedRunningTime="2025-10-02 21:26:07.401043709 +0000 UTC m=+158.724051728" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.401499 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr" podStartSLOduration=136.401492781 podStartE2EDuration="2m16.401492781s" podCreationTimestamp="2025-10-02 21:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:07.400237176 +0000 UTC m=+158.723245195" watchObservedRunningTime="2025-10-02 21:26:07.401492781 +0000 UTC m=+158.724500790" Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.456902 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.458897 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:07.958884316 +0000 UTC m=+159.281892335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.558132 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.558615 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.058596203 +0000 UTC m=+159.381604232 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.660160 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.660485 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.160470411 +0000 UTC m=+159.483478430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.761194 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.761426 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.261394363 +0000 UTC m=+159.584402382 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.761689 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.762026 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.2620131 +0000 UTC m=+159.585021119 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.862552 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.862974 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.362949092 +0000 UTC m=+159.685957111 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:07 crc kubenswrapper[4636]: I1002 21:26:07.964627 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:07 crc kubenswrapper[4636]: E1002 21:26:07.964946 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.464932553 +0000 UTC m=+159.787940572 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.066166 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.066323 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.566300177 +0000 UTC m=+159.889308186 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.066497 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.067100 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.567077628 +0000 UTC m=+159.890085687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.142439 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:08 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:08 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:08 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.142498 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.167949 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.168204 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.668152024 +0000 UTC m=+159.991160043 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.168299 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.168573 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.668560946 +0000 UTC m=+159.991568965 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.269641 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.270068 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.770039612 +0000 UTC m=+160.093047631 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.371583 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.371967 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.871952572 +0000 UTC m=+160.194960591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.384174 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t6w5h" event={"ID":"67eaa966-b8bc-427c-a47f-1314b84d74cc","Type":"ContainerStarted","Data":"e352a6b247b05777425862f6d9732ab97a8a3abcbeb5805e63cace03948f26ed"} Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.385355 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" event={"ID":"d566f76d-7861-4304-b121-22fddb254188","Type":"ContainerStarted","Data":"4e72801d5408df2fdfe5258ad2ef7e4b23ceb6439cdb903a363906eadbf15956"} Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.386680 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" event={"ID":"4bdf9ed1-c4f3-4364-866b-ee6098476ab1","Type":"ContainerStarted","Data":"86c4d374eebc52ce00379350170519ce91c599ab0d8b6afc04f63be7bf1b9c12"} Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.388903 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mc647" event={"ID":"97a981ff-12be-4043-afb9-d8c9fa7ef9d5","Type":"ContainerStarted","Data":"bb8117db0820e2c8762aefed9836fdb2348be2c77c2d0437f63348bd6739c9cc"} Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.389711 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.390457 4636 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cppbr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: 
connection refused" start-of-body= Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.390493 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" podUID="b92378a3-c9aa-4e73-8336-fdd168f717f0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.391192 4636 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-g4hs6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.391248 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.434450 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" podStartSLOduration=138.434433218 podStartE2EDuration="2m18.434433218s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:08.432301379 +0000 UTC m=+159.755309398" watchObservedRunningTime="2025-10-02 21:26:08.434433218 +0000 UTC m=+159.757441237" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.435030 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7br4x" podStartSLOduration=138.435025905 podStartE2EDuration="2m18.435025905s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:08.414430269 +0000 UTC m=+159.737438298" watchObservedRunningTime="2025-10-02 21:26:08.435025905 +0000 UTC m=+159.758033924" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.472584 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.472712 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.972694118 +0000 UTC m=+160.295702137 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.472968 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.474356 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:08.974347624 +0000 UTC m=+160.297355643 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.574466 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.574680 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.074643388 +0000 UTC m=+160.397651407 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.574737 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.575212 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.075203724 +0000 UTC m=+160.398211743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.676493 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.677032 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.17701653 +0000 UTC m=+160.500024539 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.778004 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.778445 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.278414335 +0000 UTC m=+160.601422354 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.790245 4636 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-cm9hg container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.790292 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" podUID="cf2410ff-f182-4416-ba76-49b77dfcce3a" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.790744 4636 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-cm9hg container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.790793 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg" podUID="cf2410ff-f182-4416-ba76-49b77dfcce3a" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.895666 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.900578 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.400484238 +0000 UTC m=+160.723492257 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:08 crc kubenswrapper[4636]: I1002 21:26:08.998000 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:08 crc kubenswrapper[4636]: E1002 21:26:08.998399 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.498384745 +0000 UTC m=+160.821392764 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.004331 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.005211 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.009400    4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.009482    4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.018318    4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.098618    4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.098951    4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.598904935 +0000 UTC m=+160.921912954 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.099895    4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.100319    4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.600306704 +0000 UTC m=+160.923314723 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.142705 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:09 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:09 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:09 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.142783 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.201705 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.201889 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.201914 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.202065 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.702047378 +0000 UTC m=+161.025055407 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.303811 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.303909 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.303933 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.304532 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.804519873 +0000 UTC m=+161.127527892 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.304697 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.332226 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.396590 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" event={"ID":"e3d86ad9-1a43-4146-aa2b-13a70e8acccb","Type":"ContainerStarted","Data":"14860000bb2f21430c41e54805c8c3cdf28df664e32f38b301af0af308e58e0a"} Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.399245 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" event={"ID":"1c1a896f-2199-458b-9922-932040317faa","Type":"ContainerStarted","Data":"b11c978c596808369330507f3831c11c06336568597e12e49f93e27f4d1c8584"} Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.400654 4636 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-g4hs6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.400707 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.405219 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.405447 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.905424384 +0000 UTC m=+161.228432403 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.405526 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.405879 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:09.905867326 +0000 UTC m=+161.228875345 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.476241 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-298vs" podStartSLOduration=139.476210223 podStartE2EDuration="2m19.476210223s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:09.474957588 +0000 UTC m=+160.797965607" watchObservedRunningTime="2025-10-02 21:26:09.476210223 +0000 UTC m=+160.799218242" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.478802 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" podStartSLOduration=139.478793225 podStartE2EDuration="2m19.478793225s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:09.44714114 +0000 UTC m=+160.770149159" watchObservedRunningTime="2025-10-02 21:26:09.478793225 +0000 UTC m=+160.801801244" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.506771 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.506923 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.006891301 +0000 UTC m=+161.329899320 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.508512 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.508603 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mc647" podStartSLOduration=139.508587758 podStartE2EDuration="2m19.508587758s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:09.504142134 +0000 UTC m=+160.827150163" watchObservedRunningTime="2025-10-02 21:26:09.508587758 +0000 UTC m=+160.831595777" Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.508705 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.008690071 +0000 UTC m=+161.331698090 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.542306 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-w89sw" podStartSLOduration=139.54228737 podStartE2EDuration="2m19.54228737s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:09.541593071 +0000 UTC m=+160.864601090" watchObservedRunningTime="2025-10-02 21:26:09.54228737 +0000 UTC m=+160.865295389" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.589187 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-t6w5h" podStartSLOduration=11.589170891 podStartE2EDuration="11.589170891s" podCreationTimestamp="2025-10-02 21:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:09.588502802 +0000 UTC m=+160.911510831" watchObservedRunningTime="2025-10-02 21:26:09.589170891 +0000 UTC m=+160.912178910" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.612454 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.612843 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.112828692 +0000 UTC m=+161.435836711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.627527 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.682553    4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.695601    4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-wdwwc"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.696316    4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-wdwwc"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.697513    4636 patch_prober.go:28] interesting pod/console-f9d7485db-wdwwc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.30:8443/health\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.697548    4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wdwwc" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.30:8443/health\": dial tcp 10.217.0.30:8443: connect: connection refused"
Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.714076    4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.714369    4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.214356671 +0000 UTC m=+161.537364690 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.748468 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.748514 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.748552 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.748602 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.815009 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.815129 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.315113327 +0000 UTC m=+161.638121346 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.816440 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.816499 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.316482486 +0000 UTC m=+161.639490505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.846376 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-t6w5h" Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.917066 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.917205 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.417188281 +0000 UTC m=+161.740196300 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:09 crc kubenswrapper[4636]: I1002 21:26:09.917404 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:09 crc kubenswrapper[4636]: E1002 21:26:09.917729 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.417721746 +0000 UTC m=+161.740729765 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.021465 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.021777 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.521744114 +0000 UTC m=+161.844752133 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.124458 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.124866 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.624852957 +0000 UTC m=+161.947860986 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.141363 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-gbfdn" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.149630 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:10 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:10 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:10 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.149682 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.232202 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.232651 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.73263611 +0000 UTC m=+162.055644129 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.277044 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.332390 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.333456 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.334511 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.834498838 +0000 UTC m=+162.157506857 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.385772 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.389213 4636 patch_prober.go:28] interesting pod/apiserver-76f77b778f-lnls5 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.26:8443/livez\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.389362 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" podUID="e3d86ad9-1a43-4146-aa2b-13a70e8acccb" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.26:8443/livez\": dial tcp 10.217.0.26:8443: connect: connection refused" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.389738 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.417020 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.435278 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.436485 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:10.936471558 +0000 UTC m=+162.259479577 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.456731 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" event={"ID":"1c1a896f-2199-458b-9922-932040317faa","Type":"ContainerStarted","Data":"c49ce2adede9aed1cc90e22a69844e7478bd21bc99a98434ea45b2550ea837d3"} Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.523162 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-497mz" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.541970 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.542333 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.042320058 +0000 UTC m=+162.365328077 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.651539 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.652192 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.152174119 +0000 UTC m=+162.475182138 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.653349 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.655067 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.155057359 +0000 UTC m=+162.478065368 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.754479 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.754793 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.254734445 +0000 UTC m=+162.577742464 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.754877 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.755297 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.2552818 +0000 UTC m=+162.578289819 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.857298 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.857636 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.357618331 +0000 UTC m=+162.680626350 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.876070 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-v8nrr" Oct 02 21:26:10 crc kubenswrapper[4636]: I1002 21:26:10.958778 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:10 crc kubenswrapper[4636]: E1002 21:26:10.959089 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.459075958 +0000 UTC m=+162.782083977 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.026024 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.049914 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mt9qf" Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.060911 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.061095 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.561054039 +0000 UTC m=+162.884062058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.061324 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.061775 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.561745078 +0000 UTC m=+162.884771717 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.139353 4636 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-g4hs6 container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.140628 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.139832 4636 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-g4hs6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.141165 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.147111 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:11 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:11 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:11 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.147183 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.162303 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.163408 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.663381129 +0000 UTC m=+162.986389148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.209572 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-69pw2"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.210970 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.220555 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.259543 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-69pw2"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.266492 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.266848 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.766835782 +0000 UTC m=+163.089843801 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.373981 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.374197 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-utilities\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.374237 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd84r\" (UniqueName: \"kubernetes.io/projected/5332eaa0-5912-4363-b6f4-acb85584401e-kube-api-access-sd84r\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.374285 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-catalog-content\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.374364 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.874349927 +0000 UTC m=+163.197357946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.414336 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h5pjb"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.416435 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.426355 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.461861 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5pjb"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.479038 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-utilities\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.479100 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.479146 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd84r\" (UniqueName: \"kubernetes.io/projected/5332eaa0-5912-4363-b6f4-acb85584401e-kube-api-access-sd84r\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.479212 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-catalog-content\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.479769 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-catalog-content\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.480035 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-utilities\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.480298 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:11.980285869 +0000 UTC m=+163.303293888 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.532012 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774","Type":"ContainerStarted","Data":"4163d2de3eb58996b7c31772c84ddedf872290889d9e8176342d39738cf757cf"}
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.532055 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774","Type":"ContainerStarted","Data":"66acdec5bc85893e0e431e4f81ced0da4d1543cb29f750ab5b3b537fe5888eb7"}
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.543589 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd84r\" (UniqueName: \"kubernetes.io/projected/5332eaa0-5912-4363-b6f4-acb85584401e-kube-api-access-sd84r\") pod \"certified-operators-69pw2\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") " pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.549741 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.586252 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vhp82"]
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.586321 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.086303713 +0000 UTC m=+163.409311732 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.586251 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.586706 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-utilities\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.586735 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-catalog-content\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.586768 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqmpn\" (UniqueName: \"kubernetes.io/projected/efd2d320-6d76-4faa-b1b2-d062fd21cec8-kube-api-access-qqmpn\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.586817 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.587115 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.087105845 +0000 UTC m=+163.410113864 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.587215 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.590685 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" event={"ID":"1c1a896f-2199-458b-9922-932040317faa","Type":"ContainerStarted","Data":"43a269261d5c2e48039b10e849e71bd0eb2752f1e07c9dba6028d863e36e9277"}
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.651580 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vhp82"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.653664 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.653647835 podStartE2EDuration="3.653647835s" podCreationTimestamp="2025-10-02 21:26:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:11.636265379 +0000 UTC m=+162.959273388" watchObservedRunningTime="2025-10-02 21:26:11.653647835 +0000 UTC m=+162.976655854"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.699794 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.700046 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt8z6\" (UniqueName: \"kubernetes.io/projected/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-kube-api-access-pt8z6\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.700168 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-catalog-content\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.700222 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-utilities\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.700241 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-utilities\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.700279 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-catalog-content\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.700296 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqmpn\" (UniqueName: \"kubernetes.io/projected/efd2d320-6d76-4faa-b1b2-d062fd21cec8-kube-api-access-qqmpn\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.700620 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.200605128 +0000 UTC m=+163.523613147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.706179 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-catalog-content\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.706980 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-utilities\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.754165 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c6xmc"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.755159 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.766711 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqmpn\" (UniqueName: \"kubernetes.io/projected/efd2d320-6d76-4faa-b1b2-d062fd21cec8-kube-api-access-qqmpn\") pod \"community-operators-h5pjb\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") " pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.772690 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c6xmc"]
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.791549 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.802865 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-utilities\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.807322 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt8z6\" (UniqueName: \"kubernetes.io/projected/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-kube-api-access-pt8z6\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.807447 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.807875 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-catalog-content\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.808259 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-catalog-content\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.803319 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-utilities\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.811148 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.311129578 +0000 UTC m=+163.634137597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.820935 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cm9hg"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.848295 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt8z6\" (UniqueName: \"kubernetes.io/projected/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-kube-api-access-pt8z6\") pod \"certified-operators-vhp82\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.909323 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.909510 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-catalog-content\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.909552 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-utilities\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.909626 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77f5t\" (UniqueName: \"kubernetes.io/projected/b900aa72-5621-4793-8652-4e6fb02b02e9-kube-api-access-77f5t\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:11 crc kubenswrapper[4636]: E1002 21:26:11.910526 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.410494686 +0000 UTC m=+163.733502705 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:11 crc kubenswrapper[4636]: I1002 21:26:11.972198 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.013477 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77f5t\" (UniqueName: \"kubernetes.io/projected/b900aa72-5621-4793-8652-4e6fb02b02e9-kube-api-access-77f5t\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.013522 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.013582 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-catalog-content\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.013601 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-utilities\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.014025 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-utilities\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.014570 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.514560045 +0000 UTC m=+163.837568064 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.014945 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-catalog-content\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.072553 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77f5t\" (UniqueName: \"kubernetes.io/projected/b900aa72-5621-4793-8652-4e6fb02b02e9-kube-api-access-77f5t\") pod \"community-operators-c6xmc\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.116850 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.116992 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.616964788 +0000 UTC m=+163.939972797 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.117606 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.117951 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.617935655 +0000 UTC m=+163.940943684 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.119885 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.147919 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:12 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:12 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:12 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.147971 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.217214 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-69pw2"]
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.218215 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.218559 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.718543338 +0000 UTC m=+164.041551357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.326000 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.326290 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.82627806 +0000 UTC m=+164.149286079 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.388870 4636 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cppbr container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.388955 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" podUID="b92378a3-c9aa-4e73-8336-fdd168f717f0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.389722 4636 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cppbr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.389790 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr" podUID="b92378a3-c9aa-4e73-8336-fdd168f717f0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.426957 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.427437 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:12.927421347 +0000 UTC m=+164.250429366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.470321 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5pjb"]
Oct 02 21:26:12 crc kubenswrapper[4636]: W1002 21:26:12.485888 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefd2d320_6d76_4faa_b1b2_d062fd21cec8.slice/crio-fc907fb40fb0c397418ffcfe35775b90f7e805121da30c01a4263e0ce11f8065 WatchSource:0}: Error finding container fc907fb40fb0c397418ffcfe35775b90f7e805121da30c01a4263e0ce11f8065: Status 404 returned error can't find the container with id fc907fb40fb0c397418ffcfe35775b90f7e805121da30c01a4263e0ce11f8065
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.532534 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.532863 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.032850745 +0000 UTC m=+164.355858764 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.618735 4636 generic.go:334] "Generic (PLEG): container finished" podID="4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774" containerID="4163d2de3eb58996b7c31772c84ddedf872290889d9e8176342d39738cf757cf" exitCode=0
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.619210 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774","Type":"ContainerDied","Data":"4163d2de3eb58996b7c31772c84ddedf872290889d9e8176342d39738cf757cf"}
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.633681 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.634087 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.134071665 +0000 UTC m=+164.457079674 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.634243 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerStarted","Data":"5d205208bdef460cff0ed4d90f88dcd423d63498f8163356d5d3e7bdaa750161"}
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.638834 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerStarted","Data":"fc907fb40fb0c397418ffcfe35775b90f7e805121da30c01a4263e0ce11f8065"}
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.641625 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" event={"ID":"1c1a896f-2199-458b-9922-932040317faa","Type":"ContainerStarted","Data":"b6a5de7a9a67ebb5f03add9d11ee9bc23610e938b4e8fb8c7a5225cddaf31790"}
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.694517 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-qj2jd" podStartSLOduration=14.694496104 podStartE2EDuration="14.694496104s" podCreationTimestamp="2025-10-02 21:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:12.691968043 +0000 UTC m=+164.014976062" watchObservedRunningTime="2025-10-02 21:26:12.694496104 +0000 UTC m=+164.017504113"
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.716115 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vhp82"]
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.741737 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.743541 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.243515504 +0000 UTC m=+164.566523523 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.842657 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.843132 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.343111249 +0000 UTC m=+164.666119278 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.892342 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c6xmc"]
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.948188 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:12 crc kubenswrapper[4636]: E1002 21:26:12.948702 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.44868499 +0000 UTC m=+164.771693009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:12 crc kubenswrapper[4636]: I1002 21:26:12.993913 4636 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.050111 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.050706 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.550681622 +0000 UTC m=+164.873689641 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.051193 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.051394 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.051709 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.55168847 +0000 UTC m=+164.874696489 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.058927 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e169ed7-2c2e-4623-9f21-330753911ab5-metrics-certs\") pod \"network-metrics-daemon-zssg6\" (UID: \"5e169ed7-2c2e-4623-9f21-330753911ab5\") " pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.078559 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zssg6"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.143813 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:13 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:13 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:13 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.143894 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.153849 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.154045 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.154534 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.155256 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.655239765 +0000 UTC m=+164.978247784 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.157660 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.157837 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.169169 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.257526 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.257574 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc9e4681-64e1-4800-8ba0-26f4d5179612-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.257615 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc9e4681-64e1-4800-8ba0-26f4d5179612-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.258073 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.758049149 +0000 UTC m=+165.081057168 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.349960 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tgg4v"]
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.350895 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.355386 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.359070 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.363278 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.863232739 +0000 UTC m=+165.186240758 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.363380 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc9e4681-64e1-4800-8ba0-26f4d5179612-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.363528 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.363583 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc9e4681-64e1-4800-8ba0-26f4d5179612-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.363994 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc9e4681-64e1-4800-8ba0-26f4d5179612-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.364221 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.864212057 +0000 UTC m=+165.187220076 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.367157 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgg4v"]
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.382577 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zssg6"]
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.386879 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc9e4681-64e1-4800-8ba0-26f4d5179612-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.465243 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.465441 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.965415096 +0000 UTC m=+165.288423115 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.465512 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.465608 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-utilities\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.465625 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-catalog-content\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.465729 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbp8p\" (UniqueName: \"kubernetes.io/projected/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-kube-api-access-dbp8p\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.466132 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:13.966124736 +0000 UTC m=+165.289132755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.473105 4636 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.571940 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.572298 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-utilities\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.572326 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-catalog-content\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.572366 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbp8p\" (UniqueName: \"kubernetes.io/projected/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-kube-api-access-dbp8p\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.572883 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:14.07285341 +0000 UTC m=+165.395861429 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.573158 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-utilities\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.577006 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-catalog-content\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.597859 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbp8p\" (UniqueName: \"kubernetes.io/projected/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-kube-api-access-dbp8p\") pod \"redhat-marketplace-tgg4v\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") " pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.653310 4636 generic.go:334] "Generic (PLEG): container finished" podID="3c7843cb-a7a2-4db4-b244-f88476448291" containerID="31365e2b97feab8dab1ff10cffd56c074b257ed7b231468afb2391c66cc4ec0c" exitCode=0 Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.653416 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" event={"ID":"3c7843cb-a7a2-4db4-b244-f88476448291","Type":"ContainerDied","Data":"31365e2b97feab8dab1ff10cffd56c074b257ed7b231468afb2391c66cc4ec0c"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.658533 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zssg6" event={"ID":"5e169ed7-2c2e-4623-9f21-330753911ab5","Type":"ContainerStarted","Data":"25fd5d2b71d21e8cd7eb5877a813887830d72e44daca2c7a83dd2a0ac1e61ad4"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.660581 4636 generic.go:334] "Generic (PLEG): container finished" podID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerID="b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376" exitCode=0 Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.660653 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerDied","Data":"b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.662975 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.663576 4636 generic.go:334] "Generic (PLEG): container finished" podID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerID="931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba" exitCode=0 Oct 02 21:26:13 crc 
kubenswrapper[4636]: I1002 21:26:13.663634 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerDied","Data":"931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.663649 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerStarted","Data":"65e10950b27a9fc45bf77646cde10d8d046517cb9452518abf5d730cd291b4d4"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.667906 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.670706 4636 generic.go:334] "Generic (PLEG): container finished" podID="5332eaa0-5912-4363-b6f4-acb85584401e" containerID="b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c" exitCode=0 Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.670841 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerDied","Data":"b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.675958 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.677982 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:14.177960238 +0000 UTC m=+165.500968257 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.679035 4636 generic.go:334] "Generic (PLEG): container finished" podID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerID="f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098" exitCode=0 Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.679823 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerDied","Data":"f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.679889 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerStarted","Data":"f85b7e5cd48f9fbbaf920bc269b5b4bad35c55f077a01596da925f4d4acdbc9d"} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.754566 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wrl7v"] Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.755606 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.778436 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.779286 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-utilities\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.779380 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnwxj\" (UniqueName: \"kubernetes.io/projected/72c576ab-c0b7-42ba-9f24-f52f26167a3e-kube-api-access-cnwxj\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.779459 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-catalog-content\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.781028 4636 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 21:26:14.281000449 +0000 UTC m=+165.604008468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.801414 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.810571 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wrl7v"] Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.846596 4636 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-02T21:26:12.993941545Z","Handler":null,"Name":""} Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.881876 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.881947 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-catalog-content\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.881986 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-utilities\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.882042 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnwxj\" (UniqueName: \"kubernetes.io/projected/72c576ab-c0b7-42ba-9f24-f52f26167a3e-kube-api-access-cnwxj\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: E1002 21:26:13.882942 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 21:26:14.382907448 +0000 UTC m=+165.705915467 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qqzhj" (UID: "678df1e2-1565-4186-9221-80dac59e28aa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.884263 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-catalog-content\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.884544 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-utilities\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: W1002 21:26:13.888103 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poddc9e4681_64e1_4800_8ba0_26f4d5179612.slice/crio-e141be80cb19b5f389b4c780dd0ae33952c6da4cde8b3a60ba372e8302326ca9 WatchSource:0}: Error finding container e141be80cb19b5f389b4c780dd0ae33952c6da4cde8b3a60ba372e8302326ca9: Status 404 returned error can't find the container with id e141be80cb19b5f389b4c780dd0ae33952c6da4cde8b3a60ba372e8302326ca9 Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.896053 4636 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.896095 4636 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.927236 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnwxj\" (UniqueName: \"kubernetes.io/projected/72c576ab-c0b7-42ba-9f24-f52f26167a3e-kube-api-access-cnwxj\") pod \"redhat-marketplace-wrl7v\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") " pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:13 crc kubenswrapper[4636]: I1002 21:26:13.985132 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:13.998456 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.047820 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgg4v"] Oct 02 21:26:14 crc kubenswrapper[4636]: W1002 21:26:14.074886 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod825fd9ab_b13c_4a6c_88d2_63710ba4ad41.slice/crio-9203418e223e54b2b1a2954f25548731eb98fbfe98397aa2c96de081b97b2f92 WatchSource:0}: Error finding container 9203418e223e54b2b1a2954f25548731eb98fbfe98397aa2c96de081b97b2f92: Status 404 returned error can't find the container with id 9203418e223e54b2b1a2954f25548731eb98fbfe98397aa2c96de081b97b2f92 Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.081008 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.087527 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.091249 4636 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.091285 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.097412 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.136974 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qqzhj\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.143843 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:14 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:14 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:14 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.143946 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.284440 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.289396 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kube-api-access\") pod \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.289463 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kubelet-dir\") pod \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\" (UID: \"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774\") " Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.289666 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774" (UID: "4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.289883 4636 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.298400 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774" (UID: "4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.350569 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4nfgq"] Oct 02 21:26:14 crc kubenswrapper[4636]: E1002 21:26:14.350831 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774" containerName="pruner" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.350843 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774" containerName="pruner" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.350971 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774" containerName="pruner" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.351922 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.357830 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.362943 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4nfgq"] Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.392373 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.435115 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wrl7v"] Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.494157 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-catalog-content\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.494603 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fd7d\" (UniqueName: \"kubernetes.io/projected/14a6e48e-5e60-4cb6-9c9c-922868118ed4-kube-api-access-4fd7d\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.494653 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-utilities\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.595877 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-utilities\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.595946 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-catalog-content\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.595980 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fd7d\" (UniqueName: \"kubernetes.io/projected/14a6e48e-5e60-4cb6-9c9c-922868118ed4-kube-api-access-4fd7d\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.596722 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-utilities\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.596974 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-catalog-content\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.638943 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fd7d\" (UniqueName: \"kubernetes.io/projected/14a6e48e-5e60-4cb6-9c9c-922868118ed4-kube-api-access-4fd7d\") pod \"redhat-operators-4nfgq\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") " pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.688076 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4nfgq" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.688258 4636 generic.go:334] "Generic (PLEG): container finished" podID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerID="c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06" exitCode=0 Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.688424 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgg4v" event={"ID":"825fd9ab-b13c-4a6c-88d2-63710ba4ad41","Type":"ContainerDied","Data":"c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.688467 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgg4v" event={"ID":"825fd9ab-b13c-4a6c-88d2-63710ba4ad41","Type":"ContainerStarted","Data":"9203418e223e54b2b1a2954f25548731eb98fbfe98397aa2c96de081b97b2f92"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.690924 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wrl7v" event={"ID":"72c576ab-c0b7-42ba-9f24-f52f26167a3e","Type":"ContainerStarted","Data":"a03dee2f50e190adfa0cac5d1911371cba612668429978ba893c19e7e2ba474e"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.693642 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4cfe6a9f-ff89-4fcc-bbe6-e2a2ab911774","Type":"ContainerDied","Data":"66acdec5bc85893e0e431e4f81ced0da4d1543cb29f750ab5b3b537fe5888eb7"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.693676 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66acdec5bc85893e0e431e4f81ced0da4d1543cb29f750ab5b3b537fe5888eb7" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.693699 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.700361 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dc9e4681-64e1-4800-8ba0-26f4d5179612","Type":"ContainerStarted","Data":"0bf26a7c3a6843695988c7f421a6530c979943328e55ed5a09f7827e8200b011"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.700406 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dc9e4681-64e1-4800-8ba0-26f4d5179612","Type":"ContainerStarted","Data":"e141be80cb19b5f389b4c780dd0ae33952c6da4cde8b3a60ba372e8302326ca9"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.702968 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zssg6" event={"ID":"5e169ed7-2c2e-4623-9f21-330753911ab5","Type":"ContainerStarted","Data":"a1cef30e6f45ec085bb8d135fd0888641d60a9d6d159df644e1503fe5777d394"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.703001 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zssg6" event={"ID":"5e169ed7-2c2e-4623-9f21-330753911ab5","Type":"ContainerStarted","Data":"17e23e9d39b291e285bf921ca005a9dce29d4d9ff782b580baf26b4d08f9eb0d"} Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.746008 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-zssg6" podStartSLOduration=144.745988345 podStartE2EDuration="2m24.745988345s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:14.743002262 +0000 UTC m=+166.066010291" watchObservedRunningTime="2025-10-02 21:26:14.745988345 +0000 UTC m=+166.068996364" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.780936 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xbxt6"] Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.782043 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.856553 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xbxt6"] Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.858358 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqzhj"] Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.906992 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-catalog-content\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.907181 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-utilities\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:14 crc kubenswrapper[4636]: I1002 21:26:14.907342 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcrg6\" (UniqueName: \"kubernetes.io/projected/38b81238-1668-4a16-828f-b8761d6e12d5-kube-api-access-wcrg6\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.009183 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcrg6\" (UniqueName: \"kubernetes.io/projected/38b81238-1668-4a16-828f-b8761d6e12d5-kube-api-access-wcrg6\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.009239 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-catalog-content\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.009282 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-utilities\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.009690 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-utilities\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.012029 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-catalog-content\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " 
pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.069142 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcrg6\" (UniqueName: \"kubernetes.io/projected/38b81238-1668-4a16-828f-b8761d6e12d5-kube-api-access-wcrg6\") pod \"redhat-operators-xbxt6\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.070070 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.092118 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4nfgq"] Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.124343 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.154441 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 21:26:15 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld Oct 02 21:26:15 crc kubenswrapper[4636]: [+]process-running ok Oct 02 21:26:15 crc kubenswrapper[4636]: healthz check failed Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.154547 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.213765 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c7843cb-a7a2-4db4-b244-f88476448291-config-volume\") pod \"3c7843cb-a7a2-4db4-b244-f88476448291\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.213846 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3c7843cb-a7a2-4db4-b244-f88476448291-secret-volume\") pod \"3c7843cb-a7a2-4db4-b244-f88476448291\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.213883 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vc55\" (UniqueName: \"kubernetes.io/projected/3c7843cb-a7a2-4db4-b244-f88476448291-kube-api-access-5vc55\") pod \"3c7843cb-a7a2-4db4-b244-f88476448291\" (UID: \"3c7843cb-a7a2-4db4-b244-f88476448291\") " Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.215227 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c7843cb-a7a2-4db4-b244-f88476448291-config-volume" (OuterVolumeSpecName: "config-volume") pod "3c7843cb-a7a2-4db4-b244-f88476448291" (UID: "3c7843cb-a7a2-4db4-b244-f88476448291"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.223372 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c7843cb-a7a2-4db4-b244-f88476448291-kube-api-access-5vc55" (OuterVolumeSpecName: "kube-api-access-5vc55") pod "3c7843cb-a7a2-4db4-b244-f88476448291" (UID: "3c7843cb-a7a2-4db4-b244-f88476448291"). InnerVolumeSpecName "kube-api-access-5vc55". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.231088 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c7843cb-a7a2-4db4-b244-f88476448291-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3c7843cb-a7a2-4db4-b244-f88476448291" (UID: "3c7843cb-a7a2-4db4-b244-f88476448291"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.315881 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3c7843cb-a7a2-4db4-b244-f88476448291-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.316568 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vc55\" (UniqueName: \"kubernetes.io/projected/3c7843cb-a7a2-4db4-b244-f88476448291-kube-api-access-5vc55\") on node \"crc\" DevicePath \"\"" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.316583 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c7843cb-a7a2-4db4-b244-f88476448291-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.389926 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.399447 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-lnls5" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.640644 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.671855 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xbxt6"] Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.715087 4636 generic.go:334] "Generic (PLEG): container finished" podID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerID="8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361" exitCode=0 Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.715159 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wrl7v" event={"ID":"72c576ab-c0b7-42ba-9f24-f52f26167a3e","Type":"ContainerDied","Data":"8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361"} Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.723738 4636 generic.go:334] "Generic (PLEG): container finished" podID="dc9e4681-64e1-4800-8ba0-26f4d5179612" containerID="0bf26a7c3a6843695988c7f421a6530c979943328e55ed5a09f7827e8200b011" exitCode=0 Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.723954 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dc9e4681-64e1-4800-8ba0-26f4d5179612","Type":"ContainerDied","Data":"0bf26a7c3a6843695988c7f421a6530c979943328e55ed5a09f7827e8200b011"} Oct 02 21:26:15 crc kubenswrapper[4636]: W1002 21:26:15.728327 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38b81238_1668_4a16_828f_b8761d6e12d5.slice/crio-e8274a0dff82dc949f303b8920451b08b28de405340a07afbcbf98676e6446ca WatchSource:0}: Error finding container e8274a0dff82dc949f303b8920451b08b28de405340a07afbcbf98676e6446ca: Status 404 returned error can't find the container with id e8274a0dff82dc949f303b8920451b08b28de405340a07afbcbf98676e6446ca Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.748603 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" event={"ID":"678df1e2-1565-4186-9221-80dac59e28aa","Type":"ContainerStarted","Data":"78d9d129cc5d73af57d585eb63ba25125226cc949ee1590a5e2a32a71daa1ee2"} Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.748657 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" event={"ID":"678df1e2-1565-4186-9221-80dac59e28aa","Type":"ContainerStarted","Data":"2b67439bbd0bff4ee76b421997b70dc64fd0d77c01db7d9c4162dd4cee544d7b"} Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.748699 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.779188 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.779854 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78" event={"ID":"3c7843cb-a7a2-4db4-b244-f88476448291","Type":"ContainerDied","Data":"a1a7a83535685a0a794962bafc89f26b0a46a12a914742ac69e7f656541e67c1"} Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.779890 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1a7a83535685a0a794962bafc89f26b0a46a12a914742ac69e7f656541e67c1" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.803801 4636 generic.go:334] "Generic (PLEG): container finished" podID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerID="2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d" exitCode=0 Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.803915 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerDied","Data":"2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d"} Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.803967 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerStarted","Data":"db263d8ca52b6ec153004ea91b46075db06888edf89c6f566f8f7e02d0756a28"} Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.859529 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-t6w5h" Oct 02 21:26:15 crc kubenswrapper[4636]: I1002 21:26:15.906525 4636 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" podStartSLOduration=145.906493439 podStartE2EDuration="2m25.906493439s" podCreationTimestamp="2025-10-02 21:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:26:15.898266919 +0000 UTC m=+167.221274928" watchObservedRunningTime="2025-10-02 21:26:15.906493439 +0000 UTC m=+167.229501458"
Oct 02 21:26:16 crc kubenswrapper[4636]: I1002 21:26:16.150877 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:16 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:16 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:16 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:16 crc kubenswrapper[4636]: I1002 21:26:16.151364 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:16 crc kubenswrapper[4636]: I1002 21:26:16.836691 4636 generic.go:334] "Generic (PLEG): container finished" podID="38b81238-1668-4a16-828f-b8761d6e12d5" containerID="c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72" exitCode=0
Oct 02 21:26:16 crc kubenswrapper[4636]: I1002 21:26:16.837866 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerDied","Data":"c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72"}
Oct 02 21:26:16 crc kubenswrapper[4636]: I1002 21:26:16.837913 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerStarted","Data":"e8274a0dff82dc949f303b8920451b08b28de405340a07afbcbf98676e6446ca"}
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.147975 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:17 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:17 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:17 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.148474 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.582982 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.688875 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc9e4681-64e1-4800-8ba0-26f4d5179612-kubelet-dir\") pod \"dc9e4681-64e1-4800-8ba0-26f4d5179612\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") "
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.688986 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc9e4681-64e1-4800-8ba0-26f4d5179612-kube-api-access\") pod \"dc9e4681-64e1-4800-8ba0-26f4d5179612\" (UID: \"dc9e4681-64e1-4800-8ba0-26f4d5179612\") "
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.689010 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dc9e4681-64e1-4800-8ba0-26f4d5179612-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dc9e4681-64e1-4800-8ba0-26f4d5179612" (UID: "dc9e4681-64e1-4800-8ba0-26f4d5179612"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.689474 4636 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc9e4681-64e1-4800-8ba0-26f4d5179612-kubelet-dir\") on node \"crc\" DevicePath \"\""
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.709880 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc9e4681-64e1-4800-8ba0-26f4d5179612-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dc9e4681-64e1-4800-8ba0-26f4d5179612" (UID: "dc9e4681-64e1-4800-8ba0-26f4d5179612"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.791056 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc9e4681-64e1-4800-8ba0-26f4d5179612-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.899095 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dc9e4681-64e1-4800-8ba0-26f4d5179612","Type":"ContainerDied","Data":"e141be80cb19b5f389b4c780dd0ae33952c6da4cde8b3a60ba372e8302326ca9"}
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.899155 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e141be80cb19b5f389b4c780dd0ae33952c6da4cde8b3a60ba372e8302326ca9"
Oct 02 21:26:17 crc kubenswrapper[4636]: I1002 21:26:17.899245 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 02 21:26:18 crc kubenswrapper[4636]: I1002 21:26:18.142615 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:18 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:18 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:18 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:18 crc kubenswrapper[4636]: I1002 21:26:18.142702 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.141409 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:19 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:19 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:19 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.141497 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.696342 4636 patch_prober.go:28] interesting pod/console-f9d7485db-wdwwc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.30:8443/health\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.696435 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wdwwc" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.30:8443/health\": dial tcp 10.217.0.30:8443: connect: connection refused"
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.749187 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.749271 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.749721 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:19 crc kubenswrapper[4636]: I1002 21:26:19.749831 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:20 crc kubenswrapper[4636]: I1002 21:26:20.157778 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:20 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:20 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:20 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:20 crc kubenswrapper[4636]: I1002 21:26:20.157881 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:21 crc kubenswrapper[4636]: I1002 21:26:21.141933 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:26:21 crc kubenswrapper[4636]: I1002 21:26:21.143573 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:21 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:21 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:21 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:21 crc kubenswrapper[4636]: I1002 21:26:21.143627 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:21 crc kubenswrapper[4636]: I1002 21:26:21.396766 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cppbr"
Oct 02 21:26:22 crc kubenswrapper[4636]: I1002 21:26:22.149685 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:22 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:22 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:22 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:22 crc kubenswrapper[4636]: I1002 21:26:22.153482 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:23 crc kubenswrapper[4636]: I1002 21:26:23.119791 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 02 21:26:23 crc kubenswrapper[4636]: I1002 21:26:23.119876 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 02 21:26:23 crc kubenswrapper[4636]: I1002 21:26:23.144273 4636 patch_prober.go:28] interesting pod/router-default-5444994796-gbfdn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 02 21:26:23 crc kubenswrapper[4636]: [-]has-synced failed: reason withheld
Oct 02 21:26:23 crc kubenswrapper[4636]: [+]process-running ok
Oct 02 21:26:23 crc kubenswrapper[4636]: healthz check failed
Oct 02 21:26:23 crc kubenswrapper[4636]: I1002 21:26:23.144403 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gbfdn" podUID="5392cef5-bba8-474f-a355-a505f056b6a5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 02 21:26:24 crc kubenswrapper[4636]: I1002 21:26:24.162018 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-gbfdn"
Oct 02 21:26:24 crc kubenswrapper[4636]: I1002 21:26:24.170507 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-gbfdn"
Oct 02 21:26:28 crc kubenswrapper[4636]: I1002 21:26:28.070142 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.721559 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-wdwwc"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.726220 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-wdwwc"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.749589 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.749632 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.749675 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.749676 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.749714 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-n26j2"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.750285 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"b9904a7732ab69827a5ae0e169ac64349ddc67f98df6d2c6c969c270703be746"} pod="openshift-console/downloads-7954f5f757-n26j2" containerMessage="Container download-server failed liveness probe, will be restarted"
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.750356 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" containerID="cri-o://b9904a7732ab69827a5ae0e169ac64349ddc67f98df6d2c6c969c270703be746" gracePeriod=2
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.750394 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:29 crc kubenswrapper[4636]: I1002 21:26:29.750414 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:31 crc kubenswrapper[4636]: I1002 21:26:31.060564 4636 generic.go:334] "Generic (PLEG): container finished" podID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerID="b9904a7732ab69827a5ae0e169ac64349ddc67f98df6d2c6c969c270703be746" exitCode=0
Oct 02 21:26:31 crc kubenswrapper[4636]: I1002 21:26:31.060733 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n26j2" event={"ID":"4dec7d6d-2309-486c-bebe-19dce69f40d8","Type":"ContainerDied","Data":"b9904a7732ab69827a5ae0e169ac64349ddc67f98df6d2c6c969c270703be746"}
Oct 02 21:26:34 crc kubenswrapper[4636]: I1002 21:26:34.298012 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj"
Oct 02 21:26:39 crc kubenswrapper[4636]: I1002 21:26:39.748510 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:39 crc kubenswrapper[4636]: I1002 21:26:39.749380 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:40 crc kubenswrapper[4636]: I1002 21:26:40.942966 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cf5tr"
Oct 02 21:26:49 crc kubenswrapper[4636]: I1002 21:26:49.749366 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:49 crc kubenswrapper[4636]: I1002 21:26:49.749768 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:26:53 crc kubenswrapper[4636]: I1002 21:26:53.117425 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 02 21:26:53 crc kubenswrapper[4636]: I1002 21:26:53.118005 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 02 21:26:53 crc kubenswrapper[4636]: I1002 21:26:53.118057 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:26:53 crc kubenswrapper[4636]: I1002 21:26:53.118622 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 02 21:26:53 crc kubenswrapper[4636]: I1002 21:26:53.118692 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488" gracePeriod=600
Oct 02 21:26:54 crc kubenswrapper[4636]: I1002 21:26:54.197285 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488" exitCode=0
Oct 02 21:26:54 crc kubenswrapper[4636]: I1002 21:26:54.197862 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488"}
Oct 02 21:26:56 crc kubenswrapper[4636]: E1002 21:26:56.648232 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 02 21:26:56 crc kubenswrapper[4636]: E1002 21:26:56.648924 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4fd7d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4nfgq_openshift-marketplace(14a6e48e-5e60-4cb6-9c9c-922868118ed4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:26:56 crc kubenswrapper[4636]: E1002 21:26:56.650153 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4nfgq" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.073032 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4nfgq" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.167216 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.167560 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pt8z6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vhp82_openshift-marketplace(6a2da5b7-15f5-45c5-bb0a-367cddf5e786): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.176919 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vhp82" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.177715 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.178091 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wcrg6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xbxt6_openshift-marketplace(38b81238-1668-4a16-828f-b8761d6e12d5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:26:58 crc kubenswrapper[4636]: E1002 21:26:58.179558 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xbxt6" podUID="38b81238-1668-4a16-828f-b8761d6e12d5"
Oct 02 21:26:59 crc kubenswrapper[4636]: I1002 21:26:59.750323 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:26:59 crc kubenswrapper[4636]: I1002 21:26:59.750391 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.207260 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xbxt6" podUID="38b81238-1668-4a16-828f-b8761d6e12d5"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.207390 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vhp82" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.855571 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.856061 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qqmpn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-h5pjb_openshift-marketplace(efd2d320-6d76-4faa-b1b2-d062fd21cec8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.857242 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-h5pjb" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.919593 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.919809 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sd84r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-69pw2_openshift-marketplace(5332eaa0-5912-4363-b6f4-acb85584401e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:27:00 crc kubenswrapper[4636]: E1002 21:27:00.921044 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-69pw2" podUID="5332eaa0-5912-4363-b6f4-acb85584401e"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.079888 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.080057 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dbp8p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tgg4v_openshift-marketplace(825fd9ab-b13c-4a6c-88d2-63710ba4ad41): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.082101 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tgg4v" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.092450 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.092576 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cnwxj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wrl7v_openshift-marketplace(72c576ab-c0b7-42ba-9f24-f52f26167a3e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.092980 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.093111 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-77f5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-c6xmc_openshift-marketplace(b900aa72-5621-4793-8652-4e6fb02b02e9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.093740 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-wrl7v" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.094959 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-c6xmc" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9"
Oct 02 21:27:01 crc kubenswrapper[4636]: I1002 21:27:01.242224 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"26b2e9d95b3bc56cbc9ee3b547158382c6f070be16aa5848fb638c1a78fed6dd"}
Oct 02 21:27:01 crc kubenswrapper[4636]: I1002 21:27:01.245960 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n26j2" event={"ID":"4dec7d6d-2309-486c-bebe-19dce69f40d8","Type":"ContainerStarted","Data":"4adff4d00a7280a5f36177d4ff435b51f8c0ced26c6fea2489e8ccbcea39e6cb"}
Oct 02 21:27:01 crc kubenswrapper[4636]: I1002 21:27:01.246453 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:27:01 crc kubenswrapper[4636]: I1002 21:27:01.246490 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:27:01 crc kubenswrapper[4636]: I1002 21:27:01.246636 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-n26j2"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.248693 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tgg4v" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.248918 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wrl7v" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.248959 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-c6xmc" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.248998 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-h5pjb" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8"
Oct 02 21:27:01 crc kubenswrapper[4636]: E1002 21:27:01.249044 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-69pw2" podUID="5332eaa0-5912-4363-b6f4-acb85584401e"
Oct 02 21:27:02 crc kubenswrapper[4636]: I1002 21:27:02.257280 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:27:02 crc kubenswrapper[4636]: I1002 21:27:02.257369 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:27:03 crc kubenswrapper[4636]: I1002 21:27:03.262585 4636 patch_prober.go:28] interesting pod/downloads-7954f5f757-n26j2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Oct 02 21:27:03 crc kubenswrapper[4636]: I1002 21:27:03.263104 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n26j2" podUID="4dec7d6d-2309-486c-bebe-19dce69f40d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Oct 02 21:27:09 crc kubenswrapper[4636]: I1002 21:27:09.772418 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-n26j2"
Oct 02 21:27:17 crc kubenswrapper[4636]: I1002 21:27:17.343676 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerStarted","Data":"edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.349729 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerStarted","Data":"7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.352506 4636 generic.go:334] "Generic (PLEG): container finished" podID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerID="70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014" exitCode=0
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.352671 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgg4v" event={"ID":"825fd9ab-b13c-4a6c-88d2-63710ba4ad41","Type":"ContainerDied","Data":"70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.355070 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerStarted","Data":"d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.357508 4636 generic.go:334] "Generic (PLEG): container finished" podID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerID="fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7" exitCode=0
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.357592 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wrl7v" event={"ID":"72c576ab-c0b7-42ba-9f24-f52f26167a3e","Type":"ContainerDied","Data":"fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.362468 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerStarted","Data":"021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.367951 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerStarted","Data":"0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.371183 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerStarted","Data":"121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1"}
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.372898 4636 generic.go:334] "Generic (PLEG): container finished" podID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerID="edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7" exitCode=0
Oct 02 21:27:18 crc kubenswrapper[4636]: I1002 21:27:18.373082 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerDied","Data":"edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7"}
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.389121 4636 generic.go:334] "Generic (PLEG): container finished" podID="5332eaa0-5912-4363-b6f4-acb85584401e" containerID="0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854" exitCode=0
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.389242 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerDied","Data":"0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854"}
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.395919 4636 generic.go:334] "Generic (PLEG): container finished" podID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerID="121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1" exitCode=0
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.396037 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerDied","Data":"121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1"}
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.414690 4636 generic.go:334] "Generic (PLEG): container finished" podID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerID="7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e" exitCode=0
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.414915 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerDied","Data":"7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e"}
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.433168 4636 generic.go:334] "Generic (PLEG): container finished" podID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerID="d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722" exitCode=0
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.433255 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerDied","Data":"d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722"}
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.440176 4636 generic.go:334] "Generic (PLEG): container finished" podID="38b81238-1668-4a16-828f-b8761d6e12d5" containerID="021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54" exitCode=0
Oct 02 21:27:19 crc kubenswrapper[4636]: I1002 21:27:19.440256 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerDied","Data":"021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54"}
Oct 02 21:27:24 crc kubenswrapper[4636]: I1002 21:27:24.495124 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerStarted","Data":"b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5"}
Oct 02 21:27:24 crc kubenswrapper[4636]: I1002 21:27:24.497808 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wrl7v" event={"ID":"72c576ab-c0b7-42ba-9f24-f52f26167a3e","Type":"ContainerStarted","Data":"7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9"}
Oct 02 21:27:25 crc kubenswrapper[4636]: I1002 21:27:25.530595 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4nfgq" podStartSLOduration=4.329470077 podStartE2EDuration="1m11.530575544s" podCreationTimestamp="2025-10-02 21:26:14 +0000 UTC" firstStartedPulling="2025-10-02 21:26:15.871597133 +0000 UTC m=+167.194605152" lastFinishedPulling="2025-10-02 21:27:23.07270259 +0000 UTC m=+234.395710619" observedRunningTime="2025-10-02 21:27:25.525381146 +0000 UTC m=+236.848389175" watchObservedRunningTime="2025-10-02 21:27:25.530575544 +0000 UTC m=+236.853583573"
Oct 02 21:27:25 crc kubenswrapper[4636]: I1002 21:27:25.553941 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wrl7v" podStartSLOduration=6.192576653 podStartE2EDuration="1m12.553920879s" podCreationTimestamp="2025-10-02 21:26:13 +0000 UTC" firstStartedPulling="2025-10-02 21:26:15.722929647 +0000 UTC m=+167.045937666" lastFinishedPulling="2025-10-02 21:27:22.084273873 +0000 UTC m=+233.407281892" observedRunningTime="2025-10-02 21:27:25.550493761 +0000 UTC m=+236.873501820" watchObservedRunningTime="2025-10-02 21:27:25.553920879 +0000 UTC m=+236.876928918"
Oct 02 21:27:30 crc kubenswrapper[4636]: I1002 21:27:30.539380 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerStarted","Data":"09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f"}
Oct 02 21:27:30 crc kubenswrapper[4636]: I1002 21:27:30.550925 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerStarted","Data":"98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722"}
Oct 02 21:27:30 crc kubenswrapper[4636]: I1002 21:27:30.563048 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgg4v" event={"ID":"825fd9ab-b13c-4a6c-88d2-63710ba4ad41","Type":"ContainerStarted","Data":"8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9"}
Oct 02 21:27:30 crc kubenswrapper[4636]: I1002 21:27:30.581378 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xbxt6" podStartSLOduration=4.041511073 podStartE2EDuration="1m16.581365557s" podCreationTimestamp="2025-10-02 21:26:14 +0000 UTC" firstStartedPulling="2025-10-02 21:26:16.844558304 +0000 UTC m=+168.167566323" lastFinishedPulling="2025-10-02 21:27:29.384412788 +0000 UTC m=+240.707420807" observedRunningTime="2025-10-02 21:27:30.578587008 +0000 UTC m=+241.901595017" watchObservedRunningTime="2025-10-02 21:27:30.581365557 +0000 UTC m=+241.904373576"
Oct 02 21:27:30 crc kubenswrapper[4636]: I1002 21:27:30.610655 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vhp82" podStartSLOduration=3.113999537 podStartE2EDuration="1m19.610640471s" podCreationTimestamp="2025-10-02 21:26:11 +0000 UTC" firstStartedPulling="2025-10-02 21:26:13.691860627 +0000 UTC m=+165.014868686" lastFinishedPulling="2025-10-02 21:27:30.188501581 +0000 UTC m=+241.511509620" observedRunningTime="2025-10-02 21:27:30.606805002 +0000 UTC m=+241.929813031" watchObservedRunningTime="2025-10-02 21:27:30.610640471 +0000 UTC m=+241.933648490"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.570857 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerStarted","Data":"5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43"}
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.574308 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerStarted","Data":"8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1"}
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.576805 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerStarted","Data":"0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27"}
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.589179 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tgg4v" podStartSLOduration=3.07297046 podStartE2EDuration="1m18.589157007s" podCreationTimestamp="2025-10-02 21:26:13 +0000 UTC" firstStartedPulling="2025-10-02 21:26:14.694002182 +0000 UTC m=+166.017010201" lastFinishedPulling="2025-10-02 21:27:30.210188729 +0000 UTC m=+241.533196748" observedRunningTime="2025-10-02 21:27:30.638759453 +0000 UTC m=+241.961767472" watchObservedRunningTime="2025-10-02 21:27:31.589157007 +0000 UTC m=+242.912165026"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.590548 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-69pw2" podStartSLOduration=4.090133302 podStartE2EDuration="1m20.590542036s" podCreationTimestamp="2025-10-02 21:26:11 +0000 UTC" firstStartedPulling="2025-10-02 21:26:13.695025215 +0000 UTC m=+165.018033234" lastFinishedPulling="2025-10-02 21:27:30.195433949 +0000 UTC m=+241.518441968" observedRunningTime="2025-10-02 21:27:31.589906948 +0000 UTC m=+242.912914967" watchObservedRunningTime="2025-10-02 21:27:31.590542036 +0000 UTC m=+242.913550075"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.634446 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c6xmc" podStartSLOduration=4.065228828 podStartE2EDuration="1m20.634430317s" podCreationTimestamp="2025-10-02 21:26:11 +0000 UTC" firstStartedPulling="2025-10-02 21:26:13.666094556 +0000 UTC m=+164.989102575" lastFinishedPulling="2025-10-02 21:27:30.235296045 +0000 UTC m=+241.558304064" observedRunningTime="2025-10-02 21:27:31.633594033 +0000 UTC m=+242.956602052" watchObservedRunningTime="2025-10-02 21:27:31.634430317 +0000 UTC m=+242.957438336"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.667314 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h5pjb" podStartSLOduration=4.119211149 podStartE2EDuration="1m20.667299394s" podCreationTimestamp="2025-10-02 21:26:11 +0000 UTC" firstStartedPulling="2025-10-02 21:26:13.66263796 +0000 UTC m=+164.985645979" lastFinishedPulling="2025-10-02 21:27:30.210726195 +0000 UTC m=+241.533734224" observedRunningTime="2025-10-02 21:27:31.665145562 +0000 UTC m=+242.988153581" watchObservedRunningTime="2025-10-02 21:27:31.667299394 +0000 UTC m=+242.990307413"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.797056 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.797105 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.972971 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:27:31 crc kubenswrapper[4636]: I1002 21:27:31.973664 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vhp82"
Oct 02 21:27:32 crc kubenswrapper[4636]: I1002 21:27:32.121353 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:27:32 crc kubenswrapper[4636]: I1002 21:27:32.121690 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c6xmc"
Oct 02 21:27:33 crc kubenswrapper[4636]: I1002 21:27:33.034978 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-h5pjb" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="registry-server" probeResult="failure" output=<
Oct 02 21:27:33 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s
Oct 02 21:27:33 crc kubenswrapper[4636]: >
Oct 02 21:27:33 crc kubenswrapper[4636]: I1002 21:27:33.057798 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-vhp82" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="registry-server" probeResult="failure" output=<
Oct 02 21:27:33 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s
Oct 02 21:27:33 crc kubenswrapper[4636]: >
Oct 02 21:27:33 crc kubenswrapper[4636]: I1002 21:27:33.173942 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-c6xmc" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="registry-server" probeResult="failure" output=<
Oct 02 21:27:33 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s
Oct 02 21:27:33 crc kubenswrapper[4636]: >
Oct 02 21:27:33 crc kubenswrapper[4636]: I1002 21:27:33.668294 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:27:33 crc kubenswrapper[4636]: I1002 21:27:33.668344 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:27:33 crc kubenswrapper[4636]: I1002 21:27:33.711785 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.082063 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wrl7v"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.082686 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wrl7v"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.139774 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wrl7v"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.268723 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76lrr"]
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.624352 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wrl7v"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.688980 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4nfgq"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.689023 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4nfgq"
Oct 02 21:27:34 crc kubenswrapper[4636]: I1002 21:27:34.720692 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4nfgq"
Oct 02 21:27:35 crc kubenswrapper[4636]: I1002 21:27:35.124934 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xbxt6"
Oct 02 21:27:35 crc kubenswrapper[4636]: I1002 21:27:35.125092 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xbxt6"
Oct 02 21:27:35 crc kubenswrapper[4636]: I1002 21:27:35.648471 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4nfgq"
Oct 02 21:27:36 crc kubenswrapper[4636]: I1002 21:27:36.007053 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wrl7v"]
Oct 02 21:27:36 crc kubenswrapper[4636]: I1002 21:27:36.166791 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xbxt6" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="registry-server" probeResult="failure" output=<
Oct 02 21:27:36 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s
Oct 02 21:27:36 crc kubenswrapper[4636]: >
Oct 02 21:27:36 crc kubenswrapper[4636]: I1002 21:27:36.604167 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wrl7v" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="registry-server" containerID="cri-o://7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9" gracePeriod=2
Oct 02 21:27:36 crc kubenswrapper[4636]: I1002 21:27:36.938067 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wrl7v"
Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.000687 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-utilities\") pod \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") "
Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.000770 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-catalog-content\") pod \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") "
Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.000830 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnwxj\" (UniqueName: \"kubernetes.io/projected/72c576ab-c0b7-42ba-9f24-f52f26167a3e-kube-api-access-cnwxj\") pod \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\" (UID: \"72c576ab-c0b7-42ba-9f24-f52f26167a3e\") "
Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.001957 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-utilities" (OuterVolumeSpecName: "utilities") pod "72c576ab-c0b7-42ba-9f24-f52f26167a3e" (UID: "72c576ab-c0b7-42ba-9f24-f52f26167a3e"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.010002 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72c576ab-c0b7-42ba-9f24-f52f26167a3e-kube-api-access-cnwxj" (OuterVolumeSpecName: "kube-api-access-cnwxj") pod "72c576ab-c0b7-42ba-9f24-f52f26167a3e" (UID: "72c576ab-c0b7-42ba-9f24-f52f26167a3e"). InnerVolumeSpecName "kube-api-access-cnwxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.014567 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72c576ab-c0b7-42ba-9f24-f52f26167a3e" (UID: "72c576ab-c0b7-42ba-9f24-f52f26167a3e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.103215 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.103428 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c576ab-c0b7-42ba-9f24-f52f26167a3e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.103442 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnwxj\" (UniqueName: \"kubernetes.io/projected/72c576ab-c0b7-42ba-9f24-f52f26167a3e-kube-api-access-cnwxj\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.611482 4636 generic.go:334] "Generic (PLEG): container finished" podID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerID="7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9" exitCode=0 Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.611579 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wrl7v" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.614700 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wrl7v" event={"ID":"72c576ab-c0b7-42ba-9f24-f52f26167a3e","Type":"ContainerDied","Data":"7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9"} Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.614798 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wrl7v" event={"ID":"72c576ab-c0b7-42ba-9f24-f52f26167a3e","Type":"ContainerDied","Data":"a03dee2f50e190adfa0cac5d1911371cba612668429978ba893c19e7e2ba474e"} Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.614830 4636 scope.go:117] "RemoveContainer" containerID="7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.631635 4636 scope.go:117] "RemoveContainer" containerID="fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.645428 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wrl7v"] Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.648320 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wrl7v"] Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.657235 4636 scope.go:117] "RemoveContainer" containerID="8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.672522 4636 scope.go:117] "RemoveContainer" containerID="7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9" Oct 02 21:27:37 crc kubenswrapper[4636]: E1002 21:27:37.672836 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9\": container with ID starting with 7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9 not found: ID does not exist" containerID="7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.672869 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9"} err="failed to get container status \"7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9\": rpc error: code = NotFound desc = could not find container \"7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9\": container with ID starting with 7c22c647b6fea94c8aafe067a53b84bdf77d1ec6347dfdb991585e23557a32d9 not found: ID does not exist" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.672889 4636 scope.go:117] "RemoveContainer" containerID="fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7" Oct 02 21:27:37 crc kubenswrapper[4636]: E1002 21:27:37.673072 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7\": container with ID starting with fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7 not found: ID does not exist" containerID="fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.673093 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7"} err="failed to get container status \"fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7\": rpc error: code = NotFound desc = could not find container \"fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7\": container with ID starting with fe3791b0777ff4ed67db3346900711cb5387ecc190cfcb4b90dc3e7aae08f3b7 not found: ID does not exist" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.673107 4636 scope.go:117] "RemoveContainer" containerID="8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361" Oct 02 21:27:37 crc kubenswrapper[4636]: E1002 21:27:37.673274 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361\": container with ID starting with 8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361 not found: ID does not exist" containerID="8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361" Oct 02 21:27:37 crc kubenswrapper[4636]: I1002 21:27:37.673292 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361"} err="failed to get container status \"8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361\": rpc error: code = NotFound desc = could not find container \"8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361\": container with ID starting with 8676985073b88033bffeee576b7d94b662b8683511ba3b6602f0940db98f3361 not found: ID does not exist" Oct 02 21:27:39 crc kubenswrapper[4636]: I1002 21:27:39.616825 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" path="/var/lib/kubelet/pods/72c576ab-c0b7-42ba-9f24-f52f26167a3e/volumes" Oct 02 21:27:41 crc kubenswrapper[4636]: I1002 21:27:41.551052 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-69pw2" Oct 02 21:27:41 crc kubenswrapper[4636]: I1002 21:27:41.551352 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-69pw2" Oct 02 21:27:41 crc kubenswrapper[4636]: I1002 21:27:41.592165 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-69pw2" Oct 02 21:27:41 crc kubenswrapper[4636]: I1002 21:27:41.675187 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-69pw2" Oct 02 21:27:41 crc kubenswrapper[4636]: I1002 21:27:41.834226 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h5pjb" Oct 02 21:27:41 crc kubenswrapper[4636]: I1002 21:27:41.868886 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h5pjb" Oct 02 21:27:42 crc kubenswrapper[4636]: I1002 21:27:42.009767 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vhp82" Oct 02 21:27:42 crc kubenswrapper[4636]: I1002 21:27:42.055509 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vhp82" Oct 02 21:27:42 
crc kubenswrapper[4636]: I1002 21:27:42.154141 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c6xmc" Oct 02 21:27:42 crc kubenswrapper[4636]: I1002 21:27:42.195777 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c6xmc" Oct 02 21:27:43 crc kubenswrapper[4636]: I1002 21:27:43.704886 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.007431 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c6xmc"] Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.008027 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c6xmc" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="registry-server" containerID="cri-o://0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27" gracePeriod=2 Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.210101 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vhp82"] Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.210556 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vhp82" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="registry-server" containerID="cri-o://98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722" gracePeriod=2 Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.357886 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6xmc" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.399703 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-catalog-content\") pod \"b900aa72-5621-4793-8652-4e6fb02b02e9\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.399867 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-utilities\") pod \"b900aa72-5621-4793-8652-4e6fb02b02e9\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.399889 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77f5t\" (UniqueName: \"kubernetes.io/projected/b900aa72-5621-4793-8652-4e6fb02b02e9-kube-api-access-77f5t\") pod \"b900aa72-5621-4793-8652-4e6fb02b02e9\" (UID: \"b900aa72-5621-4793-8652-4e6fb02b02e9\") " Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.400836 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-utilities" (OuterVolumeSpecName: "utilities") pod "b900aa72-5621-4793-8652-4e6fb02b02e9" (UID: "b900aa72-5621-4793-8652-4e6fb02b02e9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.411871 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b900aa72-5621-4793-8652-4e6fb02b02e9-kube-api-access-77f5t" (OuterVolumeSpecName: "kube-api-access-77f5t") pod "b900aa72-5621-4793-8652-4e6fb02b02e9" (UID: "b900aa72-5621-4793-8652-4e6fb02b02e9"). InnerVolumeSpecName "kube-api-access-77f5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.451225 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b900aa72-5621-4793-8652-4e6fb02b02e9" (UID: "b900aa72-5621-4793-8652-4e6fb02b02e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.501574 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.501601 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b900aa72-5621-4793-8652-4e6fb02b02e9-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.501611 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77f5t\" (UniqueName: \"kubernetes.io/projected/b900aa72-5621-4793-8652-4e6fb02b02e9-kube-api-access-77f5t\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.544284 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vhp82" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.602816 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-catalog-content\") pod \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.602945 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-utilities\") pod \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.602976 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pt8z6\" (UniqueName: \"kubernetes.io/projected/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-kube-api-access-pt8z6\") pod \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\" (UID: \"6a2da5b7-15f5-45c5-bb0a-367cddf5e786\") " Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.603652 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-utilities" (OuterVolumeSpecName: "utilities") pod "6a2da5b7-15f5-45c5-bb0a-367cddf5e786" (UID: "6a2da5b7-15f5-45c5-bb0a-367cddf5e786"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.605901 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-kube-api-access-pt8z6" (OuterVolumeSpecName: "kube-api-access-pt8z6") pod "6a2da5b7-15f5-45c5-bb0a-367cddf5e786" (UID: "6a2da5b7-15f5-45c5-bb0a-367cddf5e786"). InnerVolumeSpecName "kube-api-access-pt8z6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.646306 4636 generic.go:334] "Generic (PLEG): container finished" podID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerID="98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722" exitCode=0 Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.646358 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vhp82" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.646366 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerDied","Data":"98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722"} Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.646468 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vhp82" event={"ID":"6a2da5b7-15f5-45c5-bb0a-367cddf5e786","Type":"ContainerDied","Data":"f85b7e5cd48f9fbbaf920bc269b5b4bad35c55f077a01596da925f4d4acdbc9d"} Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.646487 4636 scope.go:117] "RemoveContainer" containerID="98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.648336 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a2da5b7-15f5-45c5-bb0a-367cddf5e786" (UID: "6a2da5b7-15f5-45c5-bb0a-367cddf5e786"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.649568 4636 generic.go:334] "Generic (PLEG): container finished" podID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerID="0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27" exitCode=0 Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.649606 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerDied","Data":"0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27"} Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.649632 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6xmc" event={"ID":"b900aa72-5621-4793-8652-4e6fb02b02e9","Type":"ContainerDied","Data":"65e10950b27a9fc45bf77646cde10d8d046517cb9452518abf5d730cd291b4d4"} Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.649690 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c6xmc" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.667223 4636 scope.go:117] "RemoveContainer" containerID="121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.690728 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c6xmc"] Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.694734 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c6xmc"] Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.696657 4636 scope.go:117] "RemoveContainer" containerID="f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.704521 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.704545 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pt8z6\" (UniqueName: \"kubernetes.io/projected/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-kube-api-access-pt8z6\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.704555 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2da5b7-15f5-45c5-bb0a-367cddf5e786-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.724509 4636 scope.go:117] "RemoveContainer" containerID="98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722" Oct 02 21:27:44 crc kubenswrapper[4636]: E1002 21:27:44.724963 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722\": container with ID starting with 98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722 not found: ID does not exist" containerID="98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.726150 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722"} err="failed to get container status \"98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722\": rpc error: code = NotFound desc = could not find container \"98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722\": container with ID starting with 98e33811c76029e57ab6dbb30f6205f8d35f8da7093b9718129f704ab89cc722 not found: ID does not exist" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.726190 4636 scope.go:117] "RemoveContainer" containerID="121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1" Oct 02 21:27:44 crc kubenswrapper[4636]: E1002 21:27:44.726651 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1\": container with ID starting with 121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1 not found: ID does not exist" containerID="121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.726774 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1"} err="failed to get container status \"121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1\": rpc error: code = NotFound desc = could not find container \"121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1\": container with ID starting with 121573e5a893fb190d8cfb24b26898e7ad213e0108b36ac6ac99da2bc1876de1 not found: ID does not exist" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.726870 4636 scope.go:117] "RemoveContainer" containerID="f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098" Oct 02 21:27:44 crc kubenswrapper[4636]: E1002 21:27:44.727273 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098\": container with ID starting with f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098 not found: ID does not exist" containerID="f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.727303 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098"} err="failed to get container status \"f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098\": rpc error: code = NotFound desc = could not find container \"f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098\": container with ID starting with f257b53318b3194802cfc8589f772b02aefed7afdcf526c367bce3fd473fc098 not found: ID does not exist" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.727324 4636 scope.go:117] "RemoveContainer" containerID="0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.738623 4636 scope.go:117] "RemoveContainer" containerID="d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.750152 4636 scope.go:117] "RemoveContainer" containerID="931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.761528 4636 scope.go:117] "RemoveContainer" containerID="0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27" Oct 02 21:27:44 crc kubenswrapper[4636]: E1002 21:27:44.761833 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27\": container with ID starting with 0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27 not found: ID does not exist" containerID="0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.761862 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27"} err="failed to get container status \"0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27\": rpc error: code = NotFound desc = could not find container \"0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27\": container with ID starting with 0111a026d184e94272a227a9166ed7756e9d0c4eb433e7a1349484f6dde64b27 not found: ID does not exist" Oct 02 21:27:44 
crc kubenswrapper[4636]: I1002 21:27:44.761878 4636 scope.go:117] "RemoveContainer" containerID="d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722" Oct 02 21:27:44 crc kubenswrapper[4636]: E1002 21:27:44.762195 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722\": container with ID starting with d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722 not found: ID does not exist" containerID="d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.762294 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722"} err="failed to get container status \"d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722\": rpc error: code = NotFound desc = could not find container \"d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722\": container with ID starting with d292420d10981bce528a68ead16d137be8b96b14e10bf4e3f9899a9e88f9e722 not found: ID does not exist" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.762378 4636 scope.go:117] "RemoveContainer" containerID="931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba" Oct 02 21:27:44 crc kubenswrapper[4636]: E1002 21:27:44.762741 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba\": container with ID starting with 931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba not found: ID does not exist" containerID="931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.762819 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba"} err="failed to get container status \"931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba\": rpc error: code = NotFound desc = could not find container \"931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba\": container with ID starting with 931cacb7e8549e74fee10905c03976ac516c270f9a41868f88e440b2a19590ba not found: ID does not exist" Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.971256 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vhp82"] Oct 02 21:27:44 crc kubenswrapper[4636]: I1002 21:27:44.973367 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vhp82"] Oct 02 21:27:45 crc kubenswrapper[4636]: I1002 21:27:45.166718 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:27:45 crc kubenswrapper[4636]: I1002 21:27:45.206060 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:27:45 crc kubenswrapper[4636]: I1002 21:27:45.609426 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" path="/var/lib/kubelet/pods/6a2da5b7-15f5-45c5-bb0a-367cddf5e786/volumes" Oct 02 21:27:45 crc kubenswrapper[4636]: I1002 21:27:45.610248 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" path="/var/lib/kubelet/pods/b900aa72-5621-4793-8652-4e6fb02b02e9/volumes" Oct 02 21:27:48 crc kubenswrapper[4636]: I1002 21:27:48.612128 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xbxt6"] Oct 02 21:27:48 crc kubenswrapper[4636]: I1002 21:27:48.612603 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xbxt6" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="registry-server" containerID="cri-o://09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f" gracePeriod=2 Oct 02 21:27:48 crc kubenswrapper[4636]: I1002 21:27:48.961267 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.051389 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcrg6\" (UniqueName: \"kubernetes.io/projected/38b81238-1668-4a16-828f-b8761d6e12d5-kube-api-access-wcrg6\") pod \"38b81238-1668-4a16-828f-b8761d6e12d5\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.051481 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-utilities\") pod \"38b81238-1668-4a16-828f-b8761d6e12d5\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.051501 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-catalog-content\") pod \"38b81238-1668-4a16-828f-b8761d6e12d5\" (UID: \"38b81238-1668-4a16-828f-b8761d6e12d5\") " Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.052597 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-utilities" (OuterVolumeSpecName: "utilities") pod "38b81238-1668-4a16-828f-b8761d6e12d5" (UID: "38b81238-1668-4a16-828f-b8761d6e12d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.056736 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38b81238-1668-4a16-828f-b8761d6e12d5-kube-api-access-wcrg6" (OuterVolumeSpecName: "kube-api-access-wcrg6") pod "38b81238-1668-4a16-828f-b8761d6e12d5" (UID: "38b81238-1668-4a16-828f-b8761d6e12d5"). InnerVolumeSpecName "kube-api-access-wcrg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.123288 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38b81238-1668-4a16-828f-b8761d6e12d5" (UID: "38b81238-1668-4a16-828f-b8761d6e12d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.153290 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.153328 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b81238-1668-4a16-828f-b8761d6e12d5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.153345 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcrg6\" (UniqueName: \"kubernetes.io/projected/38b81238-1668-4a16-828f-b8761d6e12d5-kube-api-access-wcrg6\") on node \"crc\" DevicePath \"\"" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.675900 4636 generic.go:334] "Generic (PLEG): container finished" podID="38b81238-1668-4a16-828f-b8761d6e12d5" containerID="09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f" exitCode=0 Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.675948 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerDied","Data":"09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f"} Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.675978 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xbxt6" event={"ID":"38b81238-1668-4a16-828f-b8761d6e12d5","Type":"ContainerDied","Data":"e8274a0dff82dc949f303b8920451b08b28de405340a07afbcbf98676e6446ca"} Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.675995 4636 scope.go:117] "RemoveContainer" containerID="09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.676107 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xbxt6" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.692401 4636 scope.go:117] "RemoveContainer" containerID="021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.694274 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xbxt6"] Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.697381 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xbxt6"] Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.707455 4636 scope.go:117] "RemoveContainer" containerID="c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.726141 4636 scope.go:117] "RemoveContainer" containerID="09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f" Oct 02 21:27:49 crc kubenswrapper[4636]: E1002 21:27:49.726501 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f\": container with ID starting with 09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f not found: ID does not exist" containerID="09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.726624 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f"} err="failed to get container status \"09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f\": rpc error: code = NotFound desc = could not find container \"09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f\": container with ID starting with 09c8d5ffeab2ade41e081d6062b3d9a564918b4814eefc579bcab7cf91ca959f not found: ID does not exist" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.726783 4636 scope.go:117] "RemoveContainer" containerID="021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54" Oct 02 21:27:49 crc kubenswrapper[4636]: E1002 21:27:49.727301 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54\": container with ID starting with 021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54 not found: ID does not exist" containerID="021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.727346 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54"} err="failed to get container status \"021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54\": rpc error: code = NotFound desc = could not find container \"021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54\": container with ID starting with 021e2df9c4167015ea4a0e3aedd8615089c08686b42ab73e75512f5fdaa1ae54 not found: ID does not exist" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.727378 4636 scope.go:117] "RemoveContainer" containerID="c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72" Oct 02 21:27:49 crc kubenswrapper[4636]: E1002 21:27:49.727724 4636 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72\": container with ID starting with c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72 not found: ID does not exist" containerID="c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72" Oct 02 21:27:49 crc kubenswrapper[4636]: I1002 21:27:49.727876 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72"} err="failed to get container status \"c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72\": rpc error: code = NotFound desc = could not find container \"c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72\": container with ID starting with c85b1f64edefe3ea1f48852a1de579fe71c9936e4818b507694c9e947d873b72 not found: ID does not exist" Oct 02 21:27:51 crc kubenswrapper[4636]: I1002 21:27:51.613685 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" path="/var/lib/kubelet/pods/38b81238-1668-4a16-828f-b8761d6e12d5/volumes" Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.300127 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerName="oauth-openshift" containerID="cri-o://5d517207bac2087b18bf8dd4464f0b4fe8ab8a009301b9fa1aa8e5967733fa08" gracePeriod=15 Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.742690 4636 generic.go:334] "Generic (PLEG): container finished" podID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerID="5d517207bac2087b18bf8dd4464f0b4fe8ab8a009301b9fa1aa8e5967733fa08" exitCode=0 Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.742735 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" event={"ID":"776d4066-e52b-45f4-8d1c-eaad48feabc9","Type":"ContainerDied","Data":"5d517207bac2087b18bf8dd4464f0b4fe8ab8a009301b9fa1aa8e5967733fa08"} Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.742778 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" event={"ID":"776d4066-e52b-45f4-8d1c-eaad48feabc9","Type":"ContainerDied","Data":"c62e18b2e27bc31fa41f3c89441137baa4f350ee9798b84dc82a6a8bc345b082"} Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.742792 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c62e18b2e27bc31fa41f3c89441137baa4f350ee9798b84dc82a6a8bc345b082" Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.745586 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr" Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883295 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-service-ca\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883380 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-ocp-branding-template\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883440 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-serving-cert\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883471 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-policies\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883494 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4m8z\" (UniqueName: \"kubernetes.io/projected/776d4066-e52b-45f4-8d1c-eaad48feabc9-kube-api-access-w4m8z\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883585 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-trusted-ca-bundle\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883679 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-dir\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883704 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-cliconfig\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883762 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-router-certs\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") " Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883790 
4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-session\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") "
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883834 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-error\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") "
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883864 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-idp-0-file-data\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") "
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883887 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-provider-selection\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") "
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.883915 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-login\") pod \"776d4066-e52b-45f4-8d1c-eaad48feabc9\" (UID: \"776d4066-e52b-45f4-8d1c-eaad48feabc9\") "
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.884776 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.884967 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.885327 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.885789 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.886440 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.890540 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.890808 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.891039 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.892972 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/776d4066-e52b-45f4-8d1c-eaad48feabc9-kube-api-access-w4m8z" (OuterVolumeSpecName: "kube-api-access-w4m8z") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "kube-api-access-w4m8z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.893144 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.893709 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.894136 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.894199 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.900298 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "776d4066-e52b-45f4-8d1c-eaad48feabc9" (UID: "776d4066-e52b-45f4-8d1c-eaad48feabc9"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.985853 4636 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-dir\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.985911 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.985934 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.985954 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.985976 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.985994 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986012 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986033 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986050 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986068 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986086 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986104 4636 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986157 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4m8z\" (UniqueName: \"kubernetes.io/projected/776d4066-e52b-45f4-8d1c-eaad48feabc9-kube-api-access-w4m8z\") on node \"crc\" DevicePath \"\""
Oct 02 21:27:59 crc kubenswrapper[4636]: I1002 21:27:59.986178 4636 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/776d4066-e52b-45f4-8d1c-eaad48feabc9-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:00 crc kubenswrapper[4636]: I1002 21:28:00.750471 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76lrr"
Oct 02 21:28:00 crc kubenswrapper[4636]: I1002 21:28:00.795912 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76lrr"]
Oct 02 21:28:00 crc kubenswrapper[4636]: I1002 21:28:00.800464 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76lrr"]
Oct 02 21:28:01 crc kubenswrapper[4636]: I1002 21:28:01.612612 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" path="/var/lib/kubelet/pods/776d4066-e52b-45f4-8d1c-eaad48feabc9/volumes"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.943896 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"]
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944741 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerName="oauth-openshift"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944790 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerName="oauth-openshift"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944810 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944822 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944836 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7843cb-a7a2-4db4-b244-f88476448291" containerName="collect-profiles"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944848 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7843cb-a7a2-4db4-b244-f88476448291" containerName="collect-profiles"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944867 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944880 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944899 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9e4681-64e1-4800-8ba0-26f4d5179612" containerName="pruner"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944912 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9e4681-64e1-4800-8ba0-26f4d5179612" containerName="pruner"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944931 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944942 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944955 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944969 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.944987 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.944999 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945016 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945029 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945044 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945055 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945069 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945081 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945100 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945113 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945128 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945140 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="extract-content"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945155 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945166 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: E1002 21:28:06.945183 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945195 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="extract-utilities"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945349 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="38b81238-1668-4a16-828f-b8761d6e12d5" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945376 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a2da5b7-15f5-45c5-bb0a-367cddf5e786" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945390 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="72c576ab-c0b7-42ba-9f24-f52f26167a3e" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945408 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="776d4066-e52b-45f4-8d1c-eaad48feabc9" containerName="oauth-openshift"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945425 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc9e4681-64e1-4800-8ba0-26f4d5179612" containerName="pruner"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945439 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c7843cb-a7a2-4db4-b244-f88476448291" containerName="collect-profiles"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.945457 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b900aa72-5621-4793-8652-4e6fb02b02e9" containerName="registry-server"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.946093 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.950335 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.954844 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.955734 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.956075 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.956178 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.956617 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.958606 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.959813 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.960099 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.960288 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.960677 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.961666 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.968608 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.970358 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.970560 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.970696 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-audit-policies\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.970923 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.971208 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.971410 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-router-certs\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.971566 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-session\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.971717 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-audit-dir\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.971897 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snw6p\" (UniqueName: \"kubernetes.io/projected/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-kube-api-access-snw6p\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.972051 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.972193 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-service-ca\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.972349 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-login\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.972505 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-error\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.972678 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.971573 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.975676 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Oct 02 21:28:06 crc kubenswrapper[4636]: I1002 21:28:06.980685 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"]
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.073982 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-login\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074026 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-error\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074060 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074088 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074114 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074158 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-audit-policies\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074189 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074208 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074237 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-session\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074255 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-router-certs\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074273 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-audit-dir\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074293 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snw6p\" (UniqueName: \"kubernetes.io/projected/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-kube-api-access-snw6p\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074313 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074332 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-service-ca\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.074982 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-service-ca\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.075028 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-audit-dir\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.075879 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.076795 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.077454 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-audit-policies\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.080700 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.080704 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-session\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.081568 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.082015 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-router-certs\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.083832 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.089167 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-error\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.093293 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.093870 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-v4-0-config-user-template-login\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.096791 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snw6p\" (UniqueName: \"kubernetes.io/projected/9f1c14ca-cc74-44ef-abe3-a59a92105ae4-kube-api-access-snw6p\") pod \"oauth-openshift-69bcbbd7f8-wrjwq\" (UID: \"9f1c14ca-cc74-44ef-abe3-a59a92105ae4\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.275816 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.716137 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"]
Oct 02 21:28:07 crc kubenswrapper[4636]: I1002 21:28:07.791419 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq" event={"ID":"9f1c14ca-cc74-44ef-abe3-a59a92105ae4","Type":"ContainerStarted","Data":"e99071fdc4f1e0ef88843229de4fb0453715538e24f09e5dce8f7a7406a53c9f"}
Oct 02 21:28:08 crc kubenswrapper[4636]: I1002 21:28:08.799458 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq" event={"ID":"9f1c14ca-cc74-44ef-abe3-a59a92105ae4","Type":"ContainerStarted","Data":"739fc409d5bc1cdf15139fa972a5d448a92304957236507482a06924b5f89cba"}
Oct 02 21:28:08 crc kubenswrapper[4636]: I1002 21:28:08.799711 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:08 crc kubenswrapper[4636]: I1002 21:28:08.808130 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq"
Oct 02 21:28:08 crc kubenswrapper[4636]: I1002 21:28:08.828488 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-wrjwq" podStartSLOduration=34.828469688 podStartE2EDuration="34.828469688s" podCreationTimestamp="2025-10-02 21:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:28:08.826942335 +0000 UTC m=+280.149950394" watchObservedRunningTime="2025-10-02 21:28:08.828469688 +0000 UTC m=+280.151477717"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.161436 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-69pw2"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.162140 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-69pw2" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="registry-server" containerID="cri-o://5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43" gracePeriod=30
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.172225 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5pjb"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.172490 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h5pjb" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="registry-server" containerID="cri-o://8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1" gracePeriod=30
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.184722 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4hs6"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.184952 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" containerID="cri-o://0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00" gracePeriod=30
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.192138 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgg4v"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.192575 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tgg4v" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="registry-server" containerID="cri-o://8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9" gracePeriod=30
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.202273 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4nfgq"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.202478 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4nfgq" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="registry-server" containerID="cri-o://b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" gracePeriod=30
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.216165 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xzwzk"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.216760 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.230731 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xzwzk"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.387874 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da2029b8-9b9d-4692-9e88-5ab20a3582ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.387921 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/da2029b8-9b9d-4692-9e88-5ab20a3582ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.387949 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8kp\" (UniqueName: \"kubernetes.io/projected/da2029b8-9b9d-4692-9e88-5ab20a3582ad-kube-api-access-jc8kp\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.488818 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da2029b8-9b9d-4692-9e88-5ab20a3582ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.488876 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/da2029b8-9b9d-4692-9e88-5ab20a3582ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.488922 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8kp\" (UniqueName: \"kubernetes.io/projected/da2029b8-9b9d-4692-9e88-5ab20a3582ad-kube-api-access-jc8kp\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.490821 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da2029b8-9b9d-4692-9e88-5ab20a3582ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.499413 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/da2029b8-9b9d-4692-9e88-5ab20a3582ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.507287 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8kp\" (UniqueName: \"kubernetes.io/projected/da2029b8-9b9d-4692-9e88-5ab20a3582ad-kube-api-access-jc8kp\") pod \"marketplace-operator-79b997595-xzwzk\" (UID: \"da2029b8-9b9d-4692-9e88-5ab20a3582ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.533279 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.567953 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-69pw2"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.640284 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgg4v"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.653131 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:28:24 crc kubenswrapper[4636]: E1002 21:28:24.690411 4636 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5 is running failed: container process not found" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" cmd=["grpc_health_probe","-addr=:50051"]
Oct 02 21:28:24 crc kubenswrapper[4636]: E1002 21:28:24.693985 4636 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5 is running failed: container process not found" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" cmd=["grpc_health_probe","-addr=:50051"]
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.694241 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-utilities\") pod \"5332eaa0-5912-4363-b6f4-acb85584401e\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.694336 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd84r\" (UniqueName: \"kubernetes.io/projected/5332eaa0-5912-4363-b6f4-acb85584401e-kube-api-access-sd84r\") pod \"5332eaa0-5912-4363-b6f4-acb85584401e\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.694425 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-catalog-content\") pod \"5332eaa0-5912-4363-b6f4-acb85584401e\" (UID: \"5332eaa0-5912-4363-b6f4-acb85584401e\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.696298 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-utilities" (OuterVolumeSpecName: "utilities") pod "5332eaa0-5912-4363-b6f4-acb85584401e" (UID: "5332eaa0-5912-4363-b6f4-acb85584401e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: E1002 21:28:24.696412 4636 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5 is running failed: container process not found" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" cmd=["grpc_health_probe","-addr=:50051"]
Oct 02 21:28:24 crc kubenswrapper[4636]: E1002 21:28:24.696458 4636 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-4nfgq" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="registry-server"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.697359 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5332eaa0-5912-4363-b6f4-acb85584401e-kube-api-access-sd84r" (OuterVolumeSpecName: "kube-api-access-sd84r") pod "5332eaa0-5912-4363-b6f4-acb85584401e" (UID: "5332eaa0-5912-4363-b6f4-acb85584401e"). InnerVolumeSpecName "kube-api-access-sd84r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.698323 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nfgq"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.780843 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795581 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fd7d\" (UniqueName: \"kubernetes.io/projected/14a6e48e-5e60-4cb6-9c9c-922868118ed4-kube-api-access-4fd7d\") pod \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795623 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqmpn\" (UniqueName: \"kubernetes.io/projected/efd2d320-6d76-4faa-b1b2-d062fd21cec8-kube-api-access-qqmpn\") pod \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795672 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-catalog-content\") pod \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795732 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-utilities\") pod \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\" (UID: \"14a6e48e-5e60-4cb6-9c9c-922868118ed4\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795863 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-catalog-content\") pod \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795883 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbp8p\" (UniqueName: \"kubernetes.io/projected/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-kube-api-access-dbp8p\") pod \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795902 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-catalog-content\") pod \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795935 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-utilities\") pod \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\" (UID: \"efd2d320-6d76-4faa-b1b2-d062fd21cec8\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.795962 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-utilities\") pod \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\" (UID: \"825fd9ab-b13c-4a6c-88d2-63710ba4ad41\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.796144 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.796165 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd84r\" (UniqueName: \"kubernetes.io/projected/5332eaa0-5912-4363-b6f4-acb85584401e-kube-api-access-sd84r\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.797336 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-utilities" (OuterVolumeSpecName: "utilities") pod "825fd9ab-b13c-4a6c-88d2-63710ba4ad41" (UID: "825fd9ab-b13c-4a6c-88d2-63710ba4ad41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.800762 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-utilities" (OuterVolumeSpecName: "utilities") pod "14a6e48e-5e60-4cb6-9c9c-922868118ed4" (UID: "14a6e48e-5e60-4cb6-9c9c-922868118ed4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.802714 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-kube-api-access-dbp8p" (OuterVolumeSpecName: "kube-api-access-dbp8p") pod "825fd9ab-b13c-4a6c-88d2-63710ba4ad41" (UID: "825fd9ab-b13c-4a6c-88d2-63710ba4ad41"). InnerVolumeSpecName "kube-api-access-dbp8p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.805992 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-utilities" (OuterVolumeSpecName: "utilities") pod "efd2d320-6d76-4faa-b1b2-d062fd21cec8" (UID: "efd2d320-6d76-4faa-b1b2-d062fd21cec8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.811421 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5332eaa0-5912-4363-b6f4-acb85584401e" (UID: "5332eaa0-5912-4363-b6f4-acb85584401e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.814236 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a6e48e-5e60-4cb6-9c9c-922868118ed4-kube-api-access-4fd7d" (OuterVolumeSpecName: "kube-api-access-4fd7d") pod "14a6e48e-5e60-4cb6-9c9c-922868118ed4" (UID: "14a6e48e-5e60-4cb6-9c9c-922868118ed4"). InnerVolumeSpecName "kube-api-access-4fd7d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.826634 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efd2d320-6d76-4faa-b1b2-d062fd21cec8-kube-api-access-qqmpn" (OuterVolumeSpecName: "kube-api-access-qqmpn") pod "efd2d320-6d76-4faa-b1b2-d062fd21cec8" (UID: "efd2d320-6d76-4faa-b1b2-d062fd21cec8"). InnerVolumeSpecName "kube-api-access-qqmpn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.833982 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "825fd9ab-b13c-4a6c-88d2-63710ba4ad41" (UID: "825fd9ab-b13c-4a6c-88d2-63710ba4ad41"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.883469 4636 generic.go:334] "Generic (PLEG): container finished" podID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" exitCode=0
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.883521 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerDied","Data":"b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5"}
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.883546 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nfgq" event={"ID":"14a6e48e-5e60-4cb6-9c9c-922868118ed4","Type":"ContainerDied","Data":"db263d8ca52b6ec153004ea91b46075db06888edf89c6f566f8f7e02d0756a28"}
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.883562 4636 scope.go:117] "RemoveContainer" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.883660 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nfgq"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.895543 4636 generic.go:334] "Generic (PLEG): container finished" podID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerID="8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1" exitCode=0
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.895624 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerDied","Data":"8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1"}
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.895670 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5pjb" event={"ID":"efd2d320-6d76-4faa-b1b2-d062fd21cec8","Type":"ContainerDied","Data":"fc907fb40fb0c397418ffcfe35775b90f7e805121da30c01a4263e0ce11f8065"}
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.895782 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5pjb"
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.896612 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efd2d320-6d76-4faa-b1b2-d062fd21cec8" (UID: "efd2d320-6d76-4faa-b1b2-d062fd21cec8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.897193 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hxzm\" (UniqueName: \"kubernetes.io/projected/610b97c0-7fa1-4c6b-bfb8-2247491aae21-kube-api-access-6hxzm\") pod \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.897323 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca\") pod \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.897449 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics\") pod \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\" (UID: \"610b97c0-7fa1-4c6b-bfb8-2247491aae21\") "
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.897740 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898201 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898319 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fd7d\" (UniqueName: \"kubernetes.io/projected/14a6e48e-5e60-4cb6-9c9c-922868118ed4-kube-api-access-4fd7d\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898391 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqmpn\" (UniqueName: \"kubernetes.io/projected/efd2d320-6d76-4faa-b1b2-d062fd21cec8-kube-api-access-qqmpn\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898448 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898504 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5332eaa0-5912-4363-b6f4-acb85584401e-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898564 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efd2d320-6d76-4faa-b1b2-d062fd21cec8-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898621 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbp8p\" (UniqueName: \"kubernetes.io/projected/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-kube-api-access-dbp8p\") on node \"crc\" DevicePath \"\""
Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898677 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName:
\"kubernetes.io/empty-dir/825fd9ab-b13c-4a6c-88d2-63710ba4ad41-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.898366 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "610b97c0-7fa1-4c6b-bfb8-2247491aae21" (UID: "610b97c0-7fa1-4c6b-bfb8-2247491aae21"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.902127 4636 generic.go:334] "Generic (PLEG): container finished" podID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerID="8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9" exitCode=0 Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.902721 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgg4v" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.902833 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgg4v" event={"ID":"825fd9ab-b13c-4a6c-88d2-63710ba4ad41","Type":"ContainerDied","Data":"8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9"} Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.902866 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgg4v" event={"ID":"825fd9ab-b13c-4a6c-88d2-63710ba4ad41","Type":"ContainerDied","Data":"9203418e223e54b2b1a2954f25548731eb98fbfe98397aa2c96de081b97b2f92"} Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.902791 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/610b97c0-7fa1-4c6b-bfb8-2247491aae21-kube-api-access-6hxzm" (OuterVolumeSpecName: "kube-api-access-6hxzm") pod "610b97c0-7fa1-4c6b-bfb8-2247491aae21" (UID: "610b97c0-7fa1-4c6b-bfb8-2247491aae21"). InnerVolumeSpecName "kube-api-access-6hxzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.914153 4636 generic.go:334] "Generic (PLEG): container finished" podID="5332eaa0-5912-4363-b6f4-acb85584401e" containerID="5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43" exitCode=0 Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.914245 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerDied","Data":"5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43"} Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.914449 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69pw2" event={"ID":"5332eaa0-5912-4363-b6f4-acb85584401e","Type":"ContainerDied","Data":"5d205208bdef460cff0ed4d90f88dcd423d63498f8163356d5d3e7bdaa750161"} Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.914479 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-69pw2" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.915478 4636 scope.go:117] "RemoveContainer" containerID="edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.916194 4636 generic.go:334] "Generic (PLEG): container finished" podID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerID="0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00" exitCode=0 Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.916242 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" event={"ID":"610b97c0-7fa1-4c6b-bfb8-2247491aae21","Type":"ContainerDied","Data":"0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00"} Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.916270 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.916279 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g4hs6" event={"ID":"610b97c0-7fa1-4c6b-bfb8-2247491aae21","Type":"ContainerDied","Data":"84b48518e05875738e5060078a7f07db653cc8d27b109c0b49ad94d64e25bb38"} Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.921049 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "610b97c0-7fa1-4c6b-bfb8-2247491aae21" (UID: "610b97c0-7fa1-4c6b-bfb8-2247491aae21"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:28:24 crc kubenswrapper[4636]: I1002 21:28:24.961862 4636 scope.go:117] "RemoveContainer" containerID="2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.002207 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-69pw2"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.003172 4636 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.003299 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hxzm\" (UniqueName: \"kubernetes.io/projected/610b97c0-7fa1-4c6b-bfb8-2247491aae21-kube-api-access-6hxzm\") on node \"crc\" DevicePath \"\"" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.003395 4636 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/610b97c0-7fa1-4c6b-bfb8-2247491aae21-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.004176 4636 scope.go:117] "RemoveContainer" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.007113 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-69pw2"] Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.012697 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5\": container with ID starting with b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5 not found: ID does not exist" containerID="b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.013383 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5"} err="failed to get container status \"b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5\": rpc error: code = NotFound desc = could not find container \"b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5\": container with ID starting with b5644a0b767f63f749e52fc30598e4bb7319f1f1275aa62cce801f5a0b94bbd5 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.013782 4636 scope.go:117] "RemoveContainer" containerID="edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.015064 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7\": container with ID starting with edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7 not found: ID does not exist" containerID="edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.015102 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7"} 
err="failed to get container status \"edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7\": rpc error: code = NotFound desc = could not find container \"edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7\": container with ID starting with edf60318a2d76029fbc486687638281466c9cc3ede20e31102c638d1df43a5d7 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.015132 4636 scope.go:117] "RemoveContainer" containerID="2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.016958 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d\": container with ID starting with 2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d not found: ID does not exist" containerID="2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.017107 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d"} err="failed to get container status \"2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d\": rpc error: code = NotFound desc = could not find container \"2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d\": container with ID starting with 2475d6ec173c929667de3c58daada53ab115d98cded91ccd55a9b4c2d2d17a8d not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.017231 4636 scope.go:117] "RemoveContainer" containerID="8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.027742 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgg4v"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.030471 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgg4v"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.042240 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14a6e48e-5e60-4cb6-9c9c-922868118ed4" (UID: "14a6e48e-5e60-4cb6-9c9c-922868118ed4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.049629 4636 scope.go:117] "RemoveContainer" containerID="7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.074247 4636 scope.go:117] "RemoveContainer" containerID="b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.094075 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xzwzk"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.094840 4636 scope.go:117] "RemoveContainer" containerID="8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.096079 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1\": container with ID starting with 8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1 not found: ID does not exist" containerID="8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.096114 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1"} err="failed to get container status \"8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1\": rpc error: code = NotFound desc = could not find container \"8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1\": container with ID starting with 8e53f7c6fc1b6dce761bbaf69b8ac25bb332585617b3c8d07f8b6ccc8116bad1 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.096137 4636 scope.go:117] "RemoveContainer" containerID="7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.096400 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e\": container with ID starting with 7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e not found: ID does not exist" containerID="7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.096442 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e"} err="failed to get container status \"7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e\": rpc error: code = NotFound desc = could not find container \"7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e\": container with ID starting with 7ea499738c7d0f028aa32f496f1dbf2926abf5c54010e54ff5d7b5e321b59f9e not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.096512 4636 scope.go:117] "RemoveContainer" containerID="b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.096950 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376\": container with ID starting with 
b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376 not found: ID does not exist" containerID="b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.097004 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376"} err="failed to get container status \"b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376\": rpc error: code = NotFound desc = could not find container \"b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376\": container with ID starting with b9267470dbe39bb13350347701d06da2c86873f30d149984e1195be261118376 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.097040 4636 scope.go:117] "RemoveContainer" containerID="8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.104052 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a6e48e-5e60-4cb6-9c9c-922868118ed4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.117545 4636 scope.go:117] "RemoveContainer" containerID="70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.133656 4636 scope.go:117] "RemoveContainer" containerID="c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.146167 4636 scope.go:117] "RemoveContainer" containerID="8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.146893 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9\": container with ID starting with 8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9 not found: ID does not exist" containerID="8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.146936 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9"} err="failed to get container status \"8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9\": rpc error: code = NotFound desc = could not find container \"8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9\": container with ID starting with 8a6a944bd59ebab12d97b037d7864f601f703f53684fa8cb8b09517714f6fdc9 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.146963 4636 scope.go:117] "RemoveContainer" containerID="70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.147240 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014\": container with ID starting with 70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014 not found: ID does not exist" containerID="70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.147259 4636 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014"} err="failed to get container status \"70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014\": rpc error: code = NotFound desc = could not find container \"70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014\": container with ID starting with 70ca57417a2920200001c926bc7436a8d29e63b53030330730c2cc3cad1bc014 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.147273 4636 scope.go:117] "RemoveContainer" containerID="c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.148566 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06\": container with ID starting with c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06 not found: ID does not exist" containerID="c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.148594 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06"} err="failed to get container status \"c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06\": rpc error: code = NotFound desc = could not find container \"c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06\": container with ID starting with c9df0d7648a264bc50e5f57462f2795c8e5fe085447e005d49b7dc117a4e7c06 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.148614 4636 scope.go:117] "RemoveContainer" containerID="5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.167553 4636 scope.go:117] "RemoveContainer" containerID="0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.189489 4636 scope.go:117] "RemoveContainer" containerID="b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.210467 4636 scope.go:117] "RemoveContainer" containerID="5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.211038 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43\": container with ID starting with 5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43 not found: ID does not exist" containerID="5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.211064 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43"} err="failed to get container status \"5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43\": rpc error: code = NotFound desc = could not find container \"5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43\": container with ID starting with 5c5148a7ece9955be4a603060c30cde73fc3a05e8a630cb86543d2e0ff41bd43 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 
21:28:25.211096 4636 scope.go:117] "RemoveContainer" containerID="0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.211415 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854\": container with ID starting with 0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854 not found: ID does not exist" containerID="0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.211432 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854"} err="failed to get container status \"0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854\": rpc error: code = NotFound desc = could not find container \"0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854\": container with ID starting with 0f0330cf4327fd3bdd0359256d3b906219543b778b234cfc3130f2b2ac594854 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.211580 4636 scope.go:117] "RemoveContainer" containerID="b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.211842 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c\": container with ID starting with b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c not found: ID does not exist" containerID="b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.211885 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c"} err="failed to get container status \"b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c\": rpc error: code = NotFound desc = could not find container \"b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c\": container with ID starting with b23733fb6ea42608597f5e5540fa260e7b133167de749d20dfc6cb9068f5bd2c not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.212016 4636 scope.go:117] "RemoveContainer" containerID="0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.227800 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4nfgq"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.231576 4636 scope.go:117] "RemoveContainer" containerID="0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00" Oct 02 21:28:25 crc kubenswrapper[4636]: E1002 21:28:25.235209 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00\": container with ID starting with 0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00 not found: ID does not exist" containerID="0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.237887 4636 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00"} err="failed to get container status \"0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00\": rpc error: code = NotFound desc = could not find container \"0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00\": container with ID starting with 0e0cad40e7bd36ebd53b33ad193e9d145221400141940f072edf036d65395e00 not found: ID does not exist" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.241043 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4nfgq"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.245788 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5pjb"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.253240 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h5pjb"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.262504 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4hs6"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.264791 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4hs6"] Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.611280 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" path="/var/lib/kubelet/pods/14a6e48e-5e60-4cb6-9c9c-922868118ed4/volumes" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.612119 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" path="/var/lib/kubelet/pods/5332eaa0-5912-4363-b6f4-acb85584401e/volumes" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.612719 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" path="/var/lib/kubelet/pods/610b97c0-7fa1-4c6b-bfb8-2247491aae21/volumes" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.613203 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" path="/var/lib/kubelet/pods/825fd9ab-b13c-4a6c-88d2-63710ba4ad41/volumes" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.613845 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" path="/var/lib/kubelet/pods/efd2d320-6d76-4faa-b1b2-d062fd21cec8/volumes" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.923036 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk" event={"ID":"da2029b8-9b9d-4692-9e88-5ab20a3582ad","Type":"ContainerStarted","Data":"ee2513f08a5a037a2bddc9479c80c6116e0f05652652a8bb3875d5f466fe888c"} Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.923075 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk" event={"ID":"da2029b8-9b9d-4692-9e88-5ab20a3582ad","Type":"ContainerStarted","Data":"c5bd3a1ac01534676d70a035b4e6526d472860e2e062856934093eb3ab98c826"} Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.923172 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.927267 4636 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk" Oct 02 21:28:25 crc kubenswrapper[4636]: I1002 21:28:25.944651 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xzwzk" podStartSLOduration=1.9446254710000002 podStartE2EDuration="1.944625471s" podCreationTimestamp="2025-10-02 21:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:28:25.940446555 +0000 UTC m=+297.263454584" watchObservedRunningTime="2025-10-02 21:28:25.944625471 +0000 UTC m=+297.267633500" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.375607 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ddfq4"] Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.375842 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.375856 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.375871 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.375879 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.375895 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.375905 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.375916 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.375925 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.375942 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376012 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376028 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376036 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="extract-utilities" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376047 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 
21:28:26.376056 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376069 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376076 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376085 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376093 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376104 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376114 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376126 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376134 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="extract-content" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376145 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376153 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: E1002 21:28:26.376165 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376173 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376282 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="efd2d320-6d76-4faa-b1b2-d062fd21cec8" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376300 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a6e48e-5e60-4cb6-9c9c-922868118ed4" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376314 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="610b97c0-7fa1-4c6b-bfb8-2247491aae21" containerName="marketplace-operator" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376323 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5332eaa0-5912-4363-b6f4-acb85584401e" containerName="registry-server" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.376334 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="825fd9ab-b13c-4a6c-88d2-63710ba4ad41" containerName="registry-server" Oct 02 
21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.377167 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.379505 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.385120 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddfq4"] Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.522270 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-utilities\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.522335 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-catalog-content\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.522405 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfm2h\" (UniqueName: \"kubernetes.io/projected/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-kube-api-access-rfm2h\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.580838 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t2h9w"] Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.581807 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.584612 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.591437 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2h9w"] Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.623742 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfm2h\" (UniqueName: \"kubernetes.io/projected/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-kube-api-access-rfm2h\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.623804 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-utilities\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.623837 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-catalog-content\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.624254 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-catalog-content\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.624657 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-utilities\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.641478 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfm2h\" (UniqueName: \"kubernetes.io/projected/0a586e9c-7ecc-49a9-8a0d-8b686def8a59-kube-api-access-rfm2h\") pod \"redhat-marketplace-ddfq4\" (UID: \"0a586e9c-7ecc-49a9-8a0d-8b686def8a59\") " pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.693948 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.725589 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl996\" (UniqueName: \"kubernetes.io/projected/52af42a7-eff9-490f-afed-2868aced615d-kube-api-access-gl996\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.725804 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52af42a7-eff9-490f-afed-2868aced615d-utilities\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.725829 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52af42a7-eff9-490f-afed-2868aced615d-catalog-content\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.827402 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52af42a7-eff9-490f-afed-2868aced615d-utilities\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.827456 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52af42a7-eff9-490f-afed-2868aced615d-catalog-content\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.827538 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl996\" (UniqueName: \"kubernetes.io/projected/52af42a7-eff9-490f-afed-2868aced615d-kube-api-access-gl996\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.828219 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52af42a7-eff9-490f-afed-2868aced615d-utilities\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.835836 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52af42a7-eff9-490f-afed-2868aced615d-catalog-content\") pod \"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.864504 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl996\" (UniqueName: \"kubernetes.io/projected/52af42a7-eff9-490f-afed-2868aced615d-kube-api-access-gl996\") pod 
\"certified-operators-t2h9w\" (UID: \"52af42a7-eff9-490f-afed-2868aced615d\") " pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:26 crc kubenswrapper[4636]: I1002 21:28:26.892594 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.072569 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddfq4"] Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.249286 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2h9w"] Oct 02 21:28:27 crc kubenswrapper[4636]: W1002 21:28:27.259757 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52af42a7_eff9_490f_afed_2868aced615d.slice/crio-1aa1193a2982933036fd09cdfe4354c34ae759007ab6fef219fe8a0dc6a8b1d8 WatchSource:0}: Error finding container 1aa1193a2982933036fd09cdfe4354c34ae759007ab6fef219fe8a0dc6a8b1d8: Status 404 returned error can't find the container with id 1aa1193a2982933036fd09cdfe4354c34ae759007ab6fef219fe8a0dc6a8b1d8 Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.939839 4636 generic.go:334] "Generic (PLEG): container finished" podID="0a586e9c-7ecc-49a9-8a0d-8b686def8a59" containerID="7879acc72a0101fc0cfc6cdf7725acd3919f7fd49ab134565c1fc151d46ed377" exitCode=0 Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.939919 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddfq4" event={"ID":"0a586e9c-7ecc-49a9-8a0d-8b686def8a59","Type":"ContainerDied","Data":"7879acc72a0101fc0cfc6cdf7725acd3919f7fd49ab134565c1fc151d46ed377"} Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.940229 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddfq4" event={"ID":"0a586e9c-7ecc-49a9-8a0d-8b686def8a59","Type":"ContainerStarted","Data":"d4188ad593ea3c597184f74ef78c49fc03bed5976e10c877c6a1806b85d29d7d"} Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.941787 4636 generic.go:334] "Generic (PLEG): container finished" podID="52af42a7-eff9-490f-afed-2868aced615d" containerID="edf520398dac854421a68dfd0534edadccb2f4f96b0fd75f1c53d3254ddce4c0" exitCode=0 Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.942239 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2h9w" event={"ID":"52af42a7-eff9-490f-afed-2868aced615d","Type":"ContainerDied","Data":"edf520398dac854421a68dfd0534edadccb2f4f96b0fd75f1c53d3254ddce4c0"} Oct 02 21:28:27 crc kubenswrapper[4636]: I1002 21:28:27.942264 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2h9w" event={"ID":"52af42a7-eff9-490f-afed-2868aced615d","Type":"ContainerStarted","Data":"1aa1193a2982933036fd09cdfe4354c34ae759007ab6fef219fe8a0dc6a8b1d8"} Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.782225 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cklqw"] Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.783738 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.786407 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.795977 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cklqw"] Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.949638 4636 generic.go:334] "Generic (PLEG): container finished" podID="0a586e9c-7ecc-49a9-8a0d-8b686def8a59" containerID="20f609712a9e25651a2289788222d37d174492985d4c429b142e38d780fc6cad" exitCode=0 Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.949679 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddfq4" event={"ID":"0a586e9c-7ecc-49a9-8a0d-8b686def8a59","Type":"ContainerDied","Data":"20f609712a9e25651a2289788222d37d174492985d4c429b142e38d780fc6cad"} Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.953975 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db50ff88-f896-4e65-84cb-d7e02349ad30-utilities\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.953997 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db50ff88-f896-4e65-84cb-d7e02349ad30-catalog-content\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.954043 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hxgx\" (UniqueName: \"kubernetes.io/projected/db50ff88-f896-4e65-84cb-d7e02349ad30-kube-api-access-2hxgx\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.976633 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6twd6"] Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.979345 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:28 crc kubenswrapper[4636]: I1002 21:28:28.981985 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:28.999330 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6twd6"] Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055158 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db50ff88-f896-4e65-84cb-d7e02349ad30-utilities\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055201 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db50ff88-f896-4e65-84cb-d7e02349ad30-catalog-content\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055225 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-catalog-content\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055260 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9kv5\" (UniqueName: \"kubernetes.io/projected/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-kube-api-access-n9kv5\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055290 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-utilities\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055309 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hxgx\" (UniqueName: \"kubernetes.io/projected/db50ff88-f896-4e65-84cb-d7e02349ad30-kube-api-access-2hxgx\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055626 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db50ff88-f896-4e65-84cb-d7e02349ad30-utilities\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.055882 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db50ff88-f896-4e65-84cb-d7e02349ad30-catalog-content\") pod \"redhat-operators-cklqw\" (UID: 
\"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.072495 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hxgx\" (UniqueName: \"kubernetes.io/projected/db50ff88-f896-4e65-84cb-d7e02349ad30-kube-api-access-2hxgx\") pod \"redhat-operators-cklqw\" (UID: \"db50ff88-f896-4e65-84cb-d7e02349ad30\") " pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.097501 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.156670 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-catalog-content\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.156983 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9kv5\" (UniqueName: \"kubernetes.io/projected/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-kube-api-access-n9kv5\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.157391 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-utilities\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.157209 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-catalog-content\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.158433 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-utilities\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.179225 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9kv5\" (UniqueName: \"kubernetes.io/projected/07a99b11-82a1-4cc1-82cb-f8e4b256eab8-kube-api-access-n9kv5\") pod \"community-operators-6twd6\" (UID: \"07a99b11-82a1-4cc1-82cb-f8e4b256eab8\") " pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.273902 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cklqw"] Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.293617 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.723686 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6twd6"] Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.957532 4636 generic.go:334] "Generic (PLEG): container finished" podID="db50ff88-f896-4e65-84cb-d7e02349ad30" containerID="8111781a1a277f8e4f8f6ed8adf6c70e9592c79553d617700d405ca0123eb84c" exitCode=0 Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.957584 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cklqw" event={"ID":"db50ff88-f896-4e65-84cb-d7e02349ad30","Type":"ContainerDied","Data":"8111781a1a277f8e4f8f6ed8adf6c70e9592c79553d617700d405ca0123eb84c"} Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.957630 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cklqw" event={"ID":"db50ff88-f896-4e65-84cb-d7e02349ad30","Type":"ContainerStarted","Data":"c63bef10b3438a553ec4dab82738dffd8f9b4049bdcbce8bce5dbd468afa2719"} Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.961029 4636 generic.go:334] "Generic (PLEG): container finished" podID="07a99b11-82a1-4cc1-82cb-f8e4b256eab8" containerID="7aec0aed162f41f69770c5931ed5fbb6caa6164e5dc1bd1f0fe241fcea30f184" exitCode=0 Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.961080 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6twd6" event={"ID":"07a99b11-82a1-4cc1-82cb-f8e4b256eab8","Type":"ContainerDied","Data":"7aec0aed162f41f69770c5931ed5fbb6caa6164e5dc1bd1f0fe241fcea30f184"} Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.961097 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6twd6" event={"ID":"07a99b11-82a1-4cc1-82cb-f8e4b256eab8","Type":"ContainerStarted","Data":"02ead77c1f16b978724b7cf4bb07a9c94405649b7ba48f1a8d88d542cdc8d687"} Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.965713 4636 generic.go:334] "Generic (PLEG): container finished" podID="52af42a7-eff9-490f-afed-2868aced615d" containerID="8252e7fec85b643b4b3731d15fb7b6e1d559c1d389b5ddae355da63208ca320e" exitCode=0 Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.966443 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2h9w" event={"ID":"52af42a7-eff9-490f-afed-2868aced615d","Type":"ContainerDied","Data":"8252e7fec85b643b4b3731d15fb7b6e1d559c1d389b5ddae355da63208ca320e"} Oct 02 21:28:29 crc kubenswrapper[4636]: I1002 21:28:29.970901 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddfq4" event={"ID":"0a586e9c-7ecc-49a9-8a0d-8b686def8a59","Type":"ContainerStarted","Data":"c8911899eefa2f226761576e887aca8df7dcc5c0e1d07021d4f0f2c18451430a"} Oct 02 21:28:30 crc kubenswrapper[4636]: I1002 21:28:30.010097 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ddfq4" podStartSLOduration=2.55677809 podStartE2EDuration="4.010081661s" podCreationTimestamp="2025-10-02 21:28:26 +0000 UTC" firstStartedPulling="2025-10-02 21:28:27.94222179 +0000 UTC m=+299.265229819" lastFinishedPulling="2025-10-02 21:28:29.395525371 +0000 UTC m=+300.718533390" observedRunningTime="2025-10-02 21:28:30.007523419 +0000 UTC m=+301.330531438" watchObservedRunningTime="2025-10-02 21:28:30.010081661 +0000 
UTC m=+301.333089680" Oct 02 21:28:30 crc kubenswrapper[4636]: I1002 21:28:30.982590 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2h9w" event={"ID":"52af42a7-eff9-490f-afed-2868aced615d","Type":"ContainerStarted","Data":"ea7126f93033655c3603b32d839d21216b631c76bbe670d21af912f442b2c667"} Oct 02 21:28:30 crc kubenswrapper[4636]: I1002 21:28:30.987603 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6twd6" event={"ID":"07a99b11-82a1-4cc1-82cb-f8e4b256eab8","Type":"ContainerStarted","Data":"da9d1157c1dc969a0cfa2271650af12eabb35879350acfe7d67e19c042b5a697"} Oct 02 21:28:31 crc kubenswrapper[4636]: I1002 21:28:31.004315 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t2h9w" podStartSLOduration=2.483252527 podStartE2EDuration="5.004300662s" podCreationTimestamp="2025-10-02 21:28:26 +0000 UTC" firstStartedPulling="2025-10-02 21:28:27.943171876 +0000 UTC m=+299.266179895" lastFinishedPulling="2025-10-02 21:28:30.464220011 +0000 UTC m=+301.787228030" observedRunningTime="2025-10-02 21:28:31.001196265 +0000 UTC m=+302.324204284" watchObservedRunningTime="2025-10-02 21:28:31.004300662 +0000 UTC m=+302.327308681" Oct 02 21:28:31 crc kubenswrapper[4636]: I1002 21:28:31.994342 4636 generic.go:334] "Generic (PLEG): container finished" podID="07a99b11-82a1-4cc1-82cb-f8e4b256eab8" containerID="da9d1157c1dc969a0cfa2271650af12eabb35879350acfe7d67e19c042b5a697" exitCode=0 Oct 02 21:28:31 crc kubenswrapper[4636]: I1002 21:28:31.994951 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6twd6" event={"ID":"07a99b11-82a1-4cc1-82cb-f8e4b256eab8","Type":"ContainerDied","Data":"da9d1157c1dc969a0cfa2271650af12eabb35879350acfe7d67e19c042b5a697"} Oct 02 21:28:31 crc kubenswrapper[4636]: I1002 21:28:31.997504 4636 generic.go:334] "Generic (PLEG): container finished" podID="db50ff88-f896-4e65-84cb-d7e02349ad30" containerID="b55451ac5b9cfa6f00da08e5d66680d9a974f617a9f7627b4b95cb9ec70a2dda" exitCode=0 Oct 02 21:28:31 crc kubenswrapper[4636]: I1002 21:28:31.997635 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cklqw" event={"ID":"db50ff88-f896-4e65-84cb-d7e02349ad30","Type":"ContainerDied","Data":"b55451ac5b9cfa6f00da08e5d66680d9a974f617a9f7627b4b95cb9ec70a2dda"} Oct 02 21:28:33 crc kubenswrapper[4636]: I1002 21:28:33.016973 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6twd6" event={"ID":"07a99b11-82a1-4cc1-82cb-f8e4b256eab8","Type":"ContainerStarted","Data":"f6554178e42b39506d4b9425d2c9dd93fcdbb624e3fcdfe95d9c999538d6c1e1"} Oct 02 21:28:33 crc kubenswrapper[4636]: I1002 21:28:33.023673 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cklqw" event={"ID":"db50ff88-f896-4e65-84cb-d7e02349ad30","Type":"ContainerStarted","Data":"1cc3240b4931d1445a120407cb7f380980e317fdf821668c0c6b2a208445d5c6"} Oct 02 21:28:33 crc kubenswrapper[4636]: I1002 21:28:33.040533 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6twd6" podStartSLOduration=2.239818745 podStartE2EDuration="5.040517519s" podCreationTimestamp="2025-10-02 21:28:28 +0000 UTC" firstStartedPulling="2025-10-02 21:28:29.963006346 +0000 UTC m=+301.286014355" lastFinishedPulling="2025-10-02 21:28:32.76370511 +0000 UTC 
m=+304.086713129" observedRunningTime="2025-10-02 21:28:33.038582595 +0000 UTC m=+304.361590624" watchObservedRunningTime="2025-10-02 21:28:33.040517519 +0000 UTC m=+304.363525538" Oct 02 21:28:33 crc kubenswrapper[4636]: I1002 21:28:33.059940 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cklqw" podStartSLOduration=2.44949969 podStartE2EDuration="5.059923641s" podCreationTimestamp="2025-10-02 21:28:28 +0000 UTC" firstStartedPulling="2025-10-02 21:28:29.959772616 +0000 UTC m=+301.282780635" lastFinishedPulling="2025-10-02 21:28:32.570196567 +0000 UTC m=+303.893204586" observedRunningTime="2025-10-02 21:28:33.057174915 +0000 UTC m=+304.380182934" watchObservedRunningTime="2025-10-02 21:28:33.059923641 +0000 UTC m=+304.382931660" Oct 02 21:28:36 crc kubenswrapper[4636]: I1002 21:28:36.695287 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:36 crc kubenswrapper[4636]: I1002 21:28:36.695662 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:36 crc kubenswrapper[4636]: I1002 21:28:36.733476 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:36 crc kubenswrapper[4636]: I1002 21:28:36.893394 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:36 crc kubenswrapper[4636]: I1002 21:28:36.893436 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:36 crc kubenswrapper[4636]: I1002 21:28:36.928403 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:37 crc kubenswrapper[4636]: I1002 21:28:37.083970 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ddfq4" Oct 02 21:28:37 crc kubenswrapper[4636]: I1002 21:28:37.084271 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t2h9w" Oct 02 21:28:39 crc kubenswrapper[4636]: I1002 21:28:39.098676 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:39 crc kubenswrapper[4636]: I1002 21:28:39.098729 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:39 crc kubenswrapper[4636]: I1002 21:28:39.141360 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:28:39 crc kubenswrapper[4636]: I1002 21:28:39.294682 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:39 crc kubenswrapper[4636]: I1002 21:28:39.294725 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:39 crc kubenswrapper[4636]: I1002 21:28:39.337182 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:40 crc kubenswrapper[4636]: I1002 21:28:40.088165 4636 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6twd6" Oct 02 21:28:40 crc kubenswrapper[4636]: I1002 21:28:40.127016 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cklqw" Oct 02 21:29:23 crc kubenswrapper[4636]: I1002 21:29:23.117139 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:29:23 crc kubenswrapper[4636]: I1002 21:29:23.117793 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:29:53 crc kubenswrapper[4636]: I1002 21:29:53.117476 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:29:53 crc kubenswrapper[4636]: I1002 21:29:53.118196 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.147082 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph"] Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.148873 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.151225 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.151420 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.158550 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph"] Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.269059 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90a13ad5-0870-486c-9af3-c172240ee976-config-volume\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.269545 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90a13ad5-0870-486c-9af3-c172240ee976-secret-volume\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.269649 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcthl\" (UniqueName: \"kubernetes.io/projected/90a13ad5-0870-486c-9af3-c172240ee976-kube-api-access-kcthl\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.370942 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90a13ad5-0870-486c-9af3-c172240ee976-config-volume\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.371059 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90a13ad5-0870-486c-9af3-c172240ee976-secret-volume\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.371120 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcthl\" (UniqueName: \"kubernetes.io/projected/90a13ad5-0870-486c-9af3-c172240ee976-kube-api-access-kcthl\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.376510 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90a13ad5-0870-486c-9af3-c172240ee976-config-volume\") pod 
\"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.379450 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90a13ad5-0870-486c-9af3-c172240ee976-secret-volume\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.395537 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcthl\" (UniqueName: \"kubernetes.io/projected/90a13ad5-0870-486c-9af3-c172240ee976-kube-api-access-kcthl\") pod \"collect-profiles-29324010-cmdph\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.475796 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:00 crc kubenswrapper[4636]: I1002 21:30:00.713696 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph"] Oct 02 21:30:01 crc kubenswrapper[4636]: I1002 21:30:01.518721 4636 generic.go:334] "Generic (PLEG): container finished" podID="90a13ad5-0870-486c-9af3-c172240ee976" containerID="df50aed8bd4d5301560977d894d74087273953e8e2d8a4f9e31b5f2f3dac168f" exitCode=0 Oct 02 21:30:01 crc kubenswrapper[4636]: I1002 21:30:01.519316 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" event={"ID":"90a13ad5-0870-486c-9af3-c172240ee976","Type":"ContainerDied","Data":"df50aed8bd4d5301560977d894d74087273953e8e2d8a4f9e31b5f2f3dac168f"} Oct 02 21:30:01 crc kubenswrapper[4636]: I1002 21:30:01.519368 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" event={"ID":"90a13ad5-0870-486c-9af3-c172240ee976","Type":"ContainerStarted","Data":"e254b99810d454acfbd1a91d43995b95613f749a55f42703669de2b388440253"} Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.431472 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.530368 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" event={"ID":"90a13ad5-0870-486c-9af3-c172240ee976","Type":"ContainerDied","Data":"e254b99810d454acfbd1a91d43995b95613f749a55f42703669de2b388440253"} Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.530404 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e254b99810d454acfbd1a91d43995b95613f749a55f42703669de2b388440253" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.530436 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.541876 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90a13ad5-0870-486c-9af3-c172240ee976-secret-volume\") pod \"90a13ad5-0870-486c-9af3-c172240ee976\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.541925 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcthl\" (UniqueName: \"kubernetes.io/projected/90a13ad5-0870-486c-9af3-c172240ee976-kube-api-access-kcthl\") pod \"90a13ad5-0870-486c-9af3-c172240ee976\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.541949 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90a13ad5-0870-486c-9af3-c172240ee976-config-volume\") pod \"90a13ad5-0870-486c-9af3-c172240ee976\" (UID: \"90a13ad5-0870-486c-9af3-c172240ee976\") " Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.542827 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90a13ad5-0870-486c-9af3-c172240ee976-config-volume" (OuterVolumeSpecName: "config-volume") pod "90a13ad5-0870-486c-9af3-c172240ee976" (UID: "90a13ad5-0870-486c-9af3-c172240ee976"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.546485 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90a13ad5-0870-486c-9af3-c172240ee976-kube-api-access-kcthl" (OuterVolumeSpecName: "kube-api-access-kcthl") pod "90a13ad5-0870-486c-9af3-c172240ee976" (UID: "90a13ad5-0870-486c-9af3-c172240ee976"). InnerVolumeSpecName "kube-api-access-kcthl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.546821 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90a13ad5-0870-486c-9af3-c172240ee976-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "90a13ad5-0870-486c-9af3-c172240ee976" (UID: "90a13ad5-0870-486c-9af3-c172240ee976"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.643552 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90a13ad5-0870-486c-9af3-c172240ee976-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.644052 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcthl\" (UniqueName: \"kubernetes.io/projected/90a13ad5-0870-486c-9af3-c172240ee976-kube-api-access-kcthl\") on node \"crc\" DevicePath \"\"" Oct 02 21:30:03 crc kubenswrapper[4636]: I1002 21:30:03.644115 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90a13ad5-0870-486c-9af3-c172240ee976-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.117399 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.118913 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.118969 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.119448 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26b2e9d95b3bc56cbc9ee3b547158382c6f070be16aa5848fb638c1a78fed6dd"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.119521 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://26b2e9d95b3bc56cbc9ee3b547158382c6f070be16aa5848fb638c1a78fed6dd" gracePeriod=600 Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.658125 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="26b2e9d95b3bc56cbc9ee3b547158382c6f070be16aa5848fb638c1a78fed6dd" exitCode=0 Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.658162 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"26b2e9d95b3bc56cbc9ee3b547158382c6f070be16aa5848fb638c1a78fed6dd"} Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.658585 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" 
event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"29a2a6f09d5129082985273f8dec3b68fd722674f2b01916004ca2855f54c75b"} Oct 02 21:30:23 crc kubenswrapper[4636]: I1002 21:30:23.658848 4636 scope.go:117] "RemoveContainer" containerID="26ff6db18e4b76d38d23733199b80d9ff594c30791f00a1e3f026e4dfe6fa488" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.433781 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-dkk7c"] Oct 02 21:31:24 crc kubenswrapper[4636]: E1002 21:31:24.434521 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a13ad5-0870-486c-9af3-c172240ee976" containerName="collect-profiles" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.434537 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a13ad5-0870-486c-9af3-c172240ee976" containerName="collect-profiles" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.434637 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="90a13ad5-0870-486c-9af3-c172240ee976" containerName="collect-profiles" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.435036 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.463278 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-dkk7c"] Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538405 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538487 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-registry-certificates\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538519 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-trusted-ca\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538546 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-registry-tls\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538579 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-installation-pull-secrets\") pod 
\"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538802 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-bound-sa-token\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538889 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7qzx\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-kube-api-access-j7qzx\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.538943 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-ca-trust-extracted\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.572553 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.640047 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-registry-certificates\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.640094 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-trusted-ca\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.640127 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-registry-tls\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.640202 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-installation-pull-secrets\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.641822 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-bound-sa-token\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.642892 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-registry-certificates\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.642926 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7qzx\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-kube-api-access-j7qzx\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.642500 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-trusted-ca\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.643220 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-ca-trust-extracted\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.645371 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-ca-trust-extracted\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.646067 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-installation-pull-secrets\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.657237 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-registry-tls\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.660868 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7qzx\" (UniqueName: 
\"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-kube-api-access-j7qzx\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.661368 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba-bound-sa-token\") pod \"image-registry-66df7c8f76-dkk7c\" (UID: \"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba\") " pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.752606 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:24 crc kubenswrapper[4636]: I1002 21:31:24.927254 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-dkk7c"] Oct 02 21:31:25 crc kubenswrapper[4636]: I1002 21:31:25.012262 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" event={"ID":"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba","Type":"ContainerStarted","Data":"2c672620ce8677abb3e3cd461ba71a74eb29a7c956f9c53fb0394422bf481699"} Oct 02 21:31:26 crc kubenswrapper[4636]: I1002 21:31:26.018654 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" event={"ID":"9c8a99a7-4b0f-43fc-86ba-54c94e7f39ba","Type":"ContainerStarted","Data":"ea8341c76fc538bdf4c63375cb2d5e7097f70de2d7f00016fea8d122e570376f"} Oct 02 21:31:26 crc kubenswrapper[4636]: I1002 21:31:26.019900 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:26 crc kubenswrapper[4636]: I1002 21:31:26.038362 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" podStartSLOduration=2.038348963 podStartE2EDuration="2.038348963s" podCreationTimestamp="2025-10-02 21:31:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:31:26.036246524 +0000 UTC m=+477.359254553" watchObservedRunningTime="2025-10-02 21:31:26.038348963 +0000 UTC m=+477.361356982" Oct 02 21:31:44 crc kubenswrapper[4636]: I1002 21:31:44.761399 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-dkk7c" Oct 02 21:31:44 crc kubenswrapper[4636]: I1002 21:31:44.871186 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqzhj"] Oct 02 21:32:09 crc kubenswrapper[4636]: I1002 21:32:09.922649 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" podUID="678df1e2-1565-4186-9221-80dac59e28aa" containerName="registry" containerID="cri-o://78d9d129cc5d73af57d585eb63ba25125226cc949ee1590a5e2a32a71daa1ee2" gracePeriod=30 Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.251027 4636 generic.go:334] "Generic (PLEG): container finished" podID="678df1e2-1565-4186-9221-80dac59e28aa" containerID="78d9d129cc5d73af57d585eb63ba25125226cc949ee1590a5e2a32a71daa1ee2" exitCode=0 Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.251333 4636 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" event={"ID":"678df1e2-1565-4186-9221-80dac59e28aa","Type":"ContainerDied","Data":"78d9d129cc5d73af57d585eb63ba25125226cc949ee1590a5e2a32a71daa1ee2"} Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.301409 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.462833 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk8hq\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-kube-api-access-wk8hq\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.462907 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/678df1e2-1565-4186-9221-80dac59e28aa-ca-trust-extracted\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.463035 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-registry-tls\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.463099 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/678df1e2-1565-4186-9221-80dac59e28aa-installation-pull-secrets\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.463156 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-trusted-ca\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.463214 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-registry-certificates\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.463404 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.463477 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-bound-sa-token\") pod \"678df1e2-1565-4186-9221-80dac59e28aa\" (UID: \"678df1e2-1565-4186-9221-80dac59e28aa\") " Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.464192 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.464417 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.470291 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/678df1e2-1565-4186-9221-80dac59e28aa-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.470292 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-kube-api-access-wk8hq" (OuterVolumeSpecName: "kube-api-access-wk8hq") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "kube-api-access-wk8hq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.470682 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.474912 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.476287 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.485392 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/678df1e2-1565-4186-9221-80dac59e28aa-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "678df1e2-1565-4186-9221-80dac59e28aa" (UID: "678df1e2-1565-4186-9221-80dac59e28aa"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565512 4636 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565562 4636 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/678df1e2-1565-4186-9221-80dac59e28aa-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565578 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565591 4636 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/678df1e2-1565-4186-9221-80dac59e28aa-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565604 4636 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565616 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk8hq\" (UniqueName: \"kubernetes.io/projected/678df1e2-1565-4186-9221-80dac59e28aa-kube-api-access-wk8hq\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:10 crc kubenswrapper[4636]: I1002 21:32:10.565626 4636 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/678df1e2-1565-4186-9221-80dac59e28aa-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 02 21:32:11 crc kubenswrapper[4636]: I1002 21:32:11.259087 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" event={"ID":"678df1e2-1565-4186-9221-80dac59e28aa","Type":"ContainerDied","Data":"2b67439bbd0bff4ee76b421997b70dc64fd0d77c01db7d9c4162dd4cee544d7b"} Oct 02 21:32:11 crc kubenswrapper[4636]: I1002 21:32:11.259162 4636 scope.go:117] "RemoveContainer" containerID="78d9d129cc5d73af57d585eb63ba25125226cc949ee1590a5e2a32a71daa1ee2" Oct 02 21:32:11 crc kubenswrapper[4636]: I1002 21:32:11.259323 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qqzhj" Oct 02 21:32:11 crc kubenswrapper[4636]: I1002 21:32:11.295023 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqzhj"] Oct 02 21:32:11 crc kubenswrapper[4636]: I1002 21:32:11.298140 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qqzhj"] Oct 02 21:32:11 crc kubenswrapper[4636]: I1002 21:32:11.609640 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="678df1e2-1565-4186-9221-80dac59e28aa" path="/var/lib/kubelet/pods/678df1e2-1565-4186-9221-80dac59e28aa/volumes" Oct 02 21:32:23 crc kubenswrapper[4636]: I1002 21:32:23.164104 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:32:23 crc kubenswrapper[4636]: I1002 21:32:23.166954 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:32:29 crc kubenswrapper[4636]: I1002 21:32:29.757872 4636 scope.go:117] "RemoveContainer" containerID="5d517207bac2087b18bf8dd4464f0b4fe8ab8a009301b9fa1aa8e5967733fa08" Oct 02 21:32:53 crc kubenswrapper[4636]: I1002 21:32:53.117459 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:32:53 crc kubenswrapper[4636]: I1002 21:32:53.118053 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.117383 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.118188 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.118287 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.119129 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"29a2a6f09d5129082985273f8dec3b68fd722674f2b01916004ca2855f54c75b"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.119229 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://29a2a6f09d5129082985273f8dec3b68fd722674f2b01916004ca2855f54c75b" gracePeriod=600 Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.697141 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="29a2a6f09d5129082985273f8dec3b68fd722674f2b01916004ca2855f54c75b" exitCode=0 Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.697240 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"29a2a6f09d5129082985273f8dec3b68fd722674f2b01916004ca2855f54c75b"} Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.697534 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"ab44b8ce55b9d49cf042f8f75e697442d8007223e48a516b488f1007f0a6409a"} Oct 02 21:33:23 crc kubenswrapper[4636]: I1002 21:33:23.697561 4636 scope.go:117] "RemoveContainer" containerID="26b2e9d95b3bc56cbc9ee3b547158382c6f070be16aa5848fb638c1a78fed6dd" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.410081 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-c6gl4"] Oct 02 21:35:08 crc kubenswrapper[4636]: E1002 21:35:08.410795 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="678df1e2-1565-4186-9221-80dac59e28aa" containerName="registry" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.410809 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="678df1e2-1565-4186-9221-80dac59e28aa" containerName="registry" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.410908 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="678df1e2-1565-4186-9221-80dac59e28aa" containerName="registry" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.411231 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.412855 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.413643 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.413806 4636 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-w8xxh" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.429983 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mkcl7"] Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.431720 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-mkcl7" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.436234 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-c6gl4"] Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.437871 4636 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-pdtgs" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.460515 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mkcl7"] Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.465962 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-lvpzq"] Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.467196 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.471215 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97spc\" (UniqueName: \"kubernetes.io/projected/48ac323f-d140-4cca-947b-e85f9028457e-kube-api-access-97spc\") pod \"cert-manager-cainjector-7f985d654d-c6gl4\" (UID: \"48ac323f-d140-4cca-947b-e85f9028457e\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.471405 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxftv\" (UniqueName: \"kubernetes.io/projected/f62f25b1-27ac-4af1-b2bb-e267d068a038-kube-api-access-sxftv\") pod \"cert-manager-5b446d88c5-mkcl7\" (UID: \"f62f25b1-27ac-4af1-b2bb-e267d068a038\") " pod="cert-manager/cert-manager-5b446d88c5-mkcl7" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.472254 4636 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-z2bhw" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.476464 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-lvpzq"] Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.572343 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97spc\" (UniqueName: \"kubernetes.io/projected/48ac323f-d140-4cca-947b-e85f9028457e-kube-api-access-97spc\") pod \"cert-manager-cainjector-7f985d654d-c6gl4\" (UID: \"48ac323f-d140-4cca-947b-e85f9028457e\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.572601 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trgkx\" (UniqueName: \"kubernetes.io/projected/4df03eaa-7663-483f-b06d-2332ee514879-kube-api-access-trgkx\") pod \"cert-manager-webhook-5655c58dd6-lvpzq\" (UID: \"4df03eaa-7663-483f-b06d-2332ee514879\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.572713 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxftv\" (UniqueName: \"kubernetes.io/projected/f62f25b1-27ac-4af1-b2bb-e267d068a038-kube-api-access-sxftv\") pod \"cert-manager-5b446d88c5-mkcl7\" (UID: \"f62f25b1-27ac-4af1-b2bb-e267d068a038\") " pod="cert-manager/cert-manager-5b446d88c5-mkcl7" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.588606 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97spc\" (UniqueName: \"kubernetes.io/projected/48ac323f-d140-4cca-947b-e85f9028457e-kube-api-access-97spc\") pod \"cert-manager-cainjector-7f985d654d-c6gl4\" (UID: \"48ac323f-d140-4cca-947b-e85f9028457e\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.589194 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxftv\" (UniqueName: \"kubernetes.io/projected/f62f25b1-27ac-4af1-b2bb-e267d068a038-kube-api-access-sxftv\") pod \"cert-manager-5b446d88c5-mkcl7\" (UID: \"f62f25b1-27ac-4af1-b2bb-e267d068a038\") " pod="cert-manager/cert-manager-5b446d88c5-mkcl7" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.674515 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trgkx\" (UniqueName: \"kubernetes.io/projected/4df03eaa-7663-483f-b06d-2332ee514879-kube-api-access-trgkx\") pod \"cert-manager-webhook-5655c58dd6-lvpzq\" (UID: \"4df03eaa-7663-483f-b06d-2332ee514879\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.691072 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trgkx\" (UniqueName: \"kubernetes.io/projected/4df03eaa-7663-483f-b06d-2332ee514879-kube-api-access-trgkx\") pod \"cert-manager-webhook-5655c58dd6-lvpzq\" (UID: \"4df03eaa-7663-483f-b06d-2332ee514879\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.748447 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.773450 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-mkcl7" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.790648 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.947961 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-c6gl4"] Oct 02 21:35:08 crc kubenswrapper[4636]: I1002 21:35:08.964585 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 21:35:09 crc kubenswrapper[4636]: I1002 21:35:09.071016 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-lvpzq"] Oct 02 21:35:09 crc kubenswrapper[4636]: W1002 21:35:09.074827 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4df03eaa_7663_483f_b06d_2332ee514879.slice/crio-0dacca39cff7d3ab585484dc7b1b0412982d60a3a20e9b5e5c3c548568a19d1b WatchSource:0}: Error finding container 0dacca39cff7d3ab585484dc7b1b0412982d60a3a20e9b5e5c3c548568a19d1b: Status 404 returned error can't find the container with id 0dacca39cff7d3ab585484dc7b1b0412982d60a3a20e9b5e5c3c548568a19d1b Oct 02 21:35:09 crc kubenswrapper[4636]: I1002 21:35:09.207883 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mkcl7"] Oct 02 21:35:09 crc kubenswrapper[4636]: W1002 21:35:09.211361 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf62f25b1_27ac_4af1_b2bb_e267d068a038.slice/crio-774a1f6cfb78441f46d8ab84938e89a44b46a80c3da07558e262df81f6a1da44 WatchSource:0}: Error finding container 774a1f6cfb78441f46d8ab84938e89a44b46a80c3da07558e262df81f6a1da44: Status 404 returned error can't find the container with id 774a1f6cfb78441f46d8ab84938e89a44b46a80c3da07558e262df81f6a1da44 Oct 02 21:35:09 crc kubenswrapper[4636]: I1002 21:35:09.274088 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" event={"ID":"4df03eaa-7663-483f-b06d-2332ee514879","Type":"ContainerStarted","Data":"0dacca39cff7d3ab585484dc7b1b0412982d60a3a20e9b5e5c3c548568a19d1b"} Oct 02 21:35:09 crc kubenswrapper[4636]: I1002 21:35:09.275239 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" event={"ID":"48ac323f-d140-4cca-947b-e85f9028457e","Type":"ContainerStarted","Data":"cb29c1536b9f3c34740b62877a9c7974387a536366276af676085c5afce71504"} Oct 02 21:35:09 crc kubenswrapper[4636]: I1002 21:35:09.276520 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-mkcl7" event={"ID":"f62f25b1-27ac-4af1-b2bb-e267d068a038","Type":"ContainerStarted","Data":"774a1f6cfb78441f46d8ab84938e89a44b46a80c3da07558e262df81f6a1da44"} Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.304629 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-mkcl7" event={"ID":"f62f25b1-27ac-4af1-b2bb-e267d068a038","Type":"ContainerStarted","Data":"54fe29b65a7bdb57ee939e92c7448fe96019eb95111b07f9679e83b071bdb622"} Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.306792 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" event={"ID":"4df03eaa-7663-483f-b06d-2332ee514879","Type":"ContainerStarted","Data":"421fd7ab180d6ec09a1b0581351a98dbbe3ffc7500aeda9fc06e51b96370f36c"} Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.309473 4636 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" event={"ID":"48ac323f-d140-4cca-947b-e85f9028457e","Type":"ContainerStarted","Data":"1bcc6434f1882a3d82229618cd580b3c3e2df9f49c73a170a25fc0e9b81dac3b"} Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.330038 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-mkcl7" podStartSLOduration=2.201862905 podStartE2EDuration="5.330024155s" podCreationTimestamp="2025-10-02 21:35:08 +0000 UTC" firstStartedPulling="2025-10-02 21:35:09.213295198 +0000 UTC m=+700.536303217" lastFinishedPulling="2025-10-02 21:35:12.341456438 +0000 UTC m=+703.664464467" observedRunningTime="2025-10-02 21:35:13.328917465 +0000 UTC m=+704.651925484" watchObservedRunningTime="2025-10-02 21:35:13.330024155 +0000 UTC m=+704.653032174" Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.367112 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-c6gl4" podStartSLOduration=3.263280073 podStartE2EDuration="5.367093694s" podCreationTimestamp="2025-10-02 21:35:08 +0000 UTC" firstStartedPulling="2025-10-02 21:35:08.964337612 +0000 UTC m=+700.287345631" lastFinishedPulling="2025-10-02 21:35:11.068151233 +0000 UTC m=+702.391159252" observedRunningTime="2025-10-02 21:35:13.35080099 +0000 UTC m=+704.673809029" watchObservedRunningTime="2025-10-02 21:35:13.367093694 +0000 UTC m=+704.690101733" Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.374445 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" podStartSLOduration=2.093786603 podStartE2EDuration="5.374418343s" podCreationTimestamp="2025-10-02 21:35:08 +0000 UTC" firstStartedPulling="2025-10-02 21:35:09.07742219 +0000 UTC m=+700.400430209" lastFinishedPulling="2025-10-02 21:35:12.35805392 +0000 UTC m=+703.681061949" observedRunningTime="2025-10-02 21:35:13.36695224 +0000 UTC m=+704.689960269" watchObservedRunningTime="2025-10-02 21:35:13.374418343 +0000 UTC m=+704.697426382" Oct 02 21:35:13 crc kubenswrapper[4636]: I1002 21:35:13.791679 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.792941 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-lvpzq" Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.986511 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7qm8"] Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.986999 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-controller" containerID="cri-o://ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" gracePeriod=30 Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.987129 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-node" containerID="cri-o://3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" gracePeriod=30 Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.987101 4636 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="nbdb" containerID="cri-o://44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" gracePeriod=30 Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.987200 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-acl-logging" containerID="cri-o://a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" gracePeriod=30 Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.987136 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" gracePeriod=30 Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.987612 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="sbdb" containerID="cri-o://a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" gracePeriod=30 Oct 02 21:35:18 crc kubenswrapper[4636]: I1002 21:35:18.987184 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="northd" containerID="cri-o://8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" gracePeriod=30 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.037153 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" containerID="cri-o://38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" gracePeriod=30 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.334637 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/3.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.338269 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovn-acl-logging/0.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.338686 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovn-controller/0.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.339122 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.344894 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/2.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.345299 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/1.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.345343 4636 generic.go:334] "Generic (PLEG): container finished" podID="3a64b152-90d7-4dd0-be73-17e987476a1c" containerID="1a2ea439df06f9806eab9afc3c9eafa2569042c5f9505062da564e81d1d281ff" exitCode=2 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.345399 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerDied","Data":"1a2ea439df06f9806eab9afc3c9eafa2569042c5f9505062da564e81d1d281ff"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.345436 4636 scope.go:117] "RemoveContainer" containerID="b9c9a53b73923661f98724b9b5df130c60338da71e1d78533de52fc022d60f8d" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.345935 4636 scope.go:117] "RemoveContainer" containerID="1a2ea439df06f9806eab9afc3c9eafa2569042c5f9505062da564e81d1d281ff" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.346122 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-895mm_openshift-multus(3a64b152-90d7-4dd0-be73-17e987476a1c)\"" pod="openshift-multus/multus-895mm" podUID="3a64b152-90d7-4dd0-be73-17e987476a1c" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.349116 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovnkube-controller/3.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.352550 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovn-acl-logging/0.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353038 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7qm8_db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/ovn-controller/0.log" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353379 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" exitCode=0 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353402 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" exitCode=0 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353413 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" exitCode=0 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353422 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" exitCode=0 Oct 02 
21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353433 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" exitCode=0 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353442 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" exitCode=0 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353450 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" exitCode=143 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353459 4636 generic.go:334] "Generic (PLEG): container finished" podID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" exitCode=143 Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353452 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353488 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353518 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353535 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353544 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353557 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353584 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353596 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353607 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353613 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353618 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353623 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353628 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353633 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353638 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353660 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353666 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353684 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353690 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353695 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353700 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353705 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353710 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353715 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353720 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353741 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353756 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353766 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353775 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353783 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353788 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353792 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353798 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353839 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353844 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353849 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353855 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353860 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353868 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7qm8" event={"ID":"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6","Type":"ContainerDied","Data":"b706a209b6c9797279961e88fcdf992ef0f94f3cadbccc28b39b4a1f4c242af5"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353876 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353945 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353950 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353955 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353960 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353965 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353970 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353975 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353979 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.353984 4636 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.378708 4636 scope.go:117] "RemoveContainer" 
containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.393676 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6sf9w"] Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.393969 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="sbdb" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.393986 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="sbdb" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.393998 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394008 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394015 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-node" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394021 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-node" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394031 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394037 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394045 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-ovn-metrics" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394052 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-ovn-metrics" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394060 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394066 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394073 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="nbdb" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394079 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="nbdb" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394087 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-acl-logging" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394094 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-acl-logging" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394103 4636 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="northd" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394109 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="northd" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394116 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394122 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394130 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kubecfg-setup" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394135 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kubecfg-setup" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394144 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394150 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394232 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-node" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394242 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="kube-rbac-proxy-ovn-metrics" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394249 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394256 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="nbdb" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394264 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394272 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394279 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394287 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="northd" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394294 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394301 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovn-acl-logging" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394307 
4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394314 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="sbdb" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.394395 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.394401 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" containerName="ovnkube-controller" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.395989 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.406480 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.416859 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-kubelet\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.416927 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417003 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-systemd\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417021 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-slash\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417078 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-ovn\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417129 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-slash\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417145 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417159 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-systemd-units\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417172 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-etc-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417185 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovnkube-config\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417198 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-cni-netd\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417215 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-var-lib-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417232 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr6r2\" (UniqueName: \"kubernetes.io/projected/4c683b39-cebc-4987-a9fd-2c0fa700425f-kube-api-access-hr6r2\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417245 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-cni-bin\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417261 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-run-netns\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417274 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-log-socket\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417291 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-kubelet\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417313 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-node-log\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417329 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovn-node-metrics-cert\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417345 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417359 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovnkube-script-lib\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417376 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-env-overrides\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417395 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-run-ovn-kubernetes\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417411 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-systemd\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417450 4636 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.417843 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-slash" (OuterVolumeSpecName: "host-slash") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.428856 4636 scope.go:117] "RemoveContainer" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.436317 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.440423 4636 scope.go:117] "RemoveContainer" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.453732 4636 scope.go:117] "RemoveContainer" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.466630 4636 scope.go:117] "RemoveContainer" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.480411 4636 scope.go:117] "RemoveContainer" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.495897 4636 scope.go:117] "RemoveContainer" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.514413 4636 scope.go:117] "RemoveContainer" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518372 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shwzf\" (UniqueName: \"kubernetes.io/projected/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-kube-api-access-shwzf\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518422 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovn-node-metrics-cert\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518452 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-bin\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518504 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-script-lib\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518507 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518528 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-etc-openvswitch\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518547 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-systemd-units\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518598 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-netd\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518621 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-ovn-kubernetes\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518622 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518676 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518681 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518721 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-ovn\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518781 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518792 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518745 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-log-socket\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518861 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-config\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518872 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-log-socket" (OuterVolumeSpecName: "log-socket") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518903 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518920 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-netns\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518966 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.518976 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-var-lib-openvswitch\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519053 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519097 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-node-log\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519114 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-node-log" (OuterVolumeSpecName: "node-log") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519280 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-openvswitch\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519388 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519452 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519526 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519599 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-env-overrides\") pod \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\" (UID: \"db86cff1-cf8b-4043-bbd7-c41ea2b72ad6\") " Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.519743 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.520096 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.520838 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-etc-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.520943 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-etc-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.520991 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovnkube-config\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521029 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-cni-netd\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521063 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-var-lib-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521100 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr6r2\" (UniqueName: \"kubernetes.io/projected/4c683b39-cebc-4987-a9fd-2c0fa700425f-kube-api-access-hr6r2\") pod \"ovnkube-node-6sf9w\" (UID: 
\"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521130 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-cni-bin\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521170 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-run-netns\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521243 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-log-socket\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521281 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-kubelet\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521307 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-kube-api-access-shwzf" (OuterVolumeSpecName: "kube-api-access-shwzf") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "kube-api-access-shwzf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521328 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-node-log\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521364 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovn-node-metrics-cert\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521391 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-cni-bin\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521399 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521437 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521450 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovnkube-script-lib\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521488 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-env-overrides\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521520 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-run-ovn-kubernetes\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521525 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-run-netns\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: 
I1002 21:35:19.521538 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovnkube-config\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521555 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-systemd\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521576 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-node-log\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521608 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-kubelet\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.521630 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-log-socket\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522073 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovnkube-script-lib\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522100 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-systemd\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522113 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-var-lib-openvswitch\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522125 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-cni-netd\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522150 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-run-ovn-kubernetes\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522188 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-ovn\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522218 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-slash\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522232 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522250 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-systemd-units\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522289 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-systemd-units\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522303 4636 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522321 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-run-ovn\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522320 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4c683b39-cebc-4987-a9fd-2c0fa700425f-env-overrides\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522342 4636 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522356 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522367 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4c683b39-cebc-4987-a9fd-2c0fa700425f-host-slash\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522382 4636 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522471 4636 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-slash\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522480 4636 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522488 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shwzf\" (UniqueName: \"kubernetes.io/projected/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-kube-api-access-shwzf\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522496 4636 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522503 4636 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522511 4636 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522519 4636 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522526 4636 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522534 4636 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522541 4636 reconciler_common.go:293] "Volume detached for volume 
\"run-ovn\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522548 4636 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-log-socket\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522557 4636 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522565 4636 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522573 4636 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522581 4636 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-node-log\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.522725 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" (UID: "db86cff1-cf8b-4043-bbd7-c41ea2b72ad6"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.526731 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4c683b39-cebc-4987-a9fd-2c0fa700425f-ovn-node-metrics-cert\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.541545 4636 scope.go:117] "RemoveContainer" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.548595 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr6r2\" (UniqueName: \"kubernetes.io/projected/4c683b39-cebc-4987-a9fd-2c0fa700425f-kube-api-access-hr6r2\") pod \"ovnkube-node-6sf9w\" (UID: \"4c683b39-cebc-4987-a9fd-2c0fa700425f\") " pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.555204 4636 scope.go:117] "RemoveContainer" containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.555772 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": container with ID starting with 38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492 not found: ID does not exist" containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.555825 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} err="failed to get container status \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": rpc error: code = NotFound desc = could not find container \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": container with ID starting with 38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.556025 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.557526 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": container with ID starting with 0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0 not found: ID does not exist" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.557574 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} err="failed to get container status \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": rpc error: code = NotFound desc = could not find container \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": container with ID starting with 0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.557597 4636 
scope.go:117] "RemoveContainer" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.558431 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": container with ID starting with a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282 not found: ID does not exist" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.558738 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} err="failed to get container status \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": rpc error: code = NotFound desc = could not find container \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": container with ID starting with a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.558846 4636 scope.go:117] "RemoveContainer" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.560028 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": container with ID starting with 44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec not found: ID does not exist" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.560054 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} err="failed to get container status \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": rpc error: code = NotFound desc = could not find container \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": container with ID starting with 44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.560082 4636 scope.go:117] "RemoveContainer" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.560571 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": container with ID starting with 8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3 not found: ID does not exist" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.560615 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} err="failed to get container status \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": rpc error: code = NotFound desc = could not find container \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": container with ID starting with 
8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.560639 4636 scope.go:117] "RemoveContainer" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.560990 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": container with ID starting with 4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f not found: ID does not exist" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.561021 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} err="failed to get container status \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": rpc error: code = NotFound desc = could not find container \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": container with ID starting with 4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.561041 4636 scope.go:117] "RemoveContainer" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.561338 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": container with ID starting with 3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850 not found: ID does not exist" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.561369 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} err="failed to get container status \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": rpc error: code = NotFound desc = could not find container \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": container with ID starting with 3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.561387 4636 scope.go:117] "RemoveContainer" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.561663 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": container with ID starting with a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71 not found: ID does not exist" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.561690 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} err="failed to get container status \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": rpc 
error: code = NotFound desc = could not find container \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": container with ID starting with a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.561708 4636 scope.go:117] "RemoveContainer" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.562003 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": container with ID starting with ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814 not found: ID does not exist" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562043 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} err="failed to get container status \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": rpc error: code = NotFound desc = could not find container \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": container with ID starting with ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562063 4636 scope.go:117] "RemoveContainer" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" Oct 02 21:35:19 crc kubenswrapper[4636]: E1002 21:35:19.562386 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": container with ID starting with 8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701 not found: ID does not exist" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562415 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} err="failed to get container status \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": rpc error: code = NotFound desc = could not find container \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": container with ID starting with 8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562433 4636 scope.go:117] "RemoveContainer" containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562675 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} err="failed to get container status \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": rpc error: code = NotFound desc = could not find container \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": container with ID starting with 38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 
21:35:19.562704 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562916 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} err="failed to get container status \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": rpc error: code = NotFound desc = could not find container \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": container with ID starting with 0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.562939 4636 scope.go:117] "RemoveContainer" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.563628 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} err="failed to get container status \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": rpc error: code = NotFound desc = could not find container \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": container with ID starting with a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.563646 4636 scope.go:117] "RemoveContainer" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.564173 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} err="failed to get container status \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": rpc error: code = NotFound desc = could not find container \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": container with ID starting with 44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.564200 4636 scope.go:117] "RemoveContainer" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.564899 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} err="failed to get container status \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": rpc error: code = NotFound desc = could not find container \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": container with ID starting with 8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.564918 4636 scope.go:117] "RemoveContainer" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.565344 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} err="failed to get container status 
\"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": rpc error: code = NotFound desc = could not find container \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": container with ID starting with 4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.565373 4636 scope.go:117] "RemoveContainer" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.565710 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} err="failed to get container status \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": rpc error: code = NotFound desc = could not find container \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": container with ID starting with 3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.565738 4636 scope.go:117] "RemoveContainer" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.565959 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} err="failed to get container status \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": rpc error: code = NotFound desc = could not find container \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": container with ID starting with a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.565975 4636 scope.go:117] "RemoveContainer" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.566295 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} err="failed to get container status \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": rpc error: code = NotFound desc = could not find container \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": container with ID starting with ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.566312 4636 scope.go:117] "RemoveContainer" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.566721 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} err="failed to get container status \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": rpc error: code = NotFound desc = could not find container \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": container with ID starting with 8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.566738 4636 scope.go:117] "RemoveContainer" 
containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.567236 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} err="failed to get container status \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": rpc error: code = NotFound desc = could not find container \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": container with ID starting with 38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.567268 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.567628 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} err="failed to get container status \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": rpc error: code = NotFound desc = could not find container \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": container with ID starting with 0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.567653 4636 scope.go:117] "RemoveContainer" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.567886 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} err="failed to get container status \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": rpc error: code = NotFound desc = could not find container \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": container with ID starting with a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.567909 4636 scope.go:117] "RemoveContainer" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.568162 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} err="failed to get container status \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": rpc error: code = NotFound desc = could not find container \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": container with ID starting with 44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.568185 4636 scope.go:117] "RemoveContainer" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.568438 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} err="failed to get container status \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": rpc error: code = NotFound desc = could not find 
container \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": container with ID starting with 8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.568463 4636 scope.go:117] "RemoveContainer" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.569960 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} err="failed to get container status \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": rpc error: code = NotFound desc = could not find container \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": container with ID starting with 4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.569983 4636 scope.go:117] "RemoveContainer" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570245 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} err="failed to get container status \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": rpc error: code = NotFound desc = could not find container \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": container with ID starting with 3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570264 4636 scope.go:117] "RemoveContainer" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570511 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} err="failed to get container status \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": rpc error: code = NotFound desc = could not find container \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": container with ID starting with a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570531 4636 scope.go:117] "RemoveContainer" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570776 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} err="failed to get container status \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": rpc error: code = NotFound desc = could not find container \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": container with ID starting with ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570795 4636 scope.go:117] "RemoveContainer" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.570995 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} err="failed to get container status \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": rpc error: code = NotFound desc = could not find container \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": container with ID starting with 8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.571017 4636 scope.go:117] "RemoveContainer" containerID="38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.571266 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492"} err="failed to get container status \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": rpc error: code = NotFound desc = could not find container \"38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492\": container with ID starting with 38305fb8ac478fd3871ff9e8eaa384f510407d56801f4d244deb0689037e9492 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.571291 4636 scope.go:117] "RemoveContainer" containerID="0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.571593 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0"} err="failed to get container status \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": rpc error: code = NotFound desc = could not find container \"0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0\": container with ID starting with 0882a01043e339c9d26b96e1db19dbc485d495969926e9da4e6a9fa18bc756d0 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.571633 4636 scope.go:117] "RemoveContainer" containerID="a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.571987 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282"} err="failed to get container status \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": rpc error: code = NotFound desc = could not find container \"a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282\": container with ID starting with a2f36243f3c89eee60faeaf3e064b5c868f59eaf22e9a3e3ea641605e4260282 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572015 4636 scope.go:117] "RemoveContainer" containerID="44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572253 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec"} err="failed to get container status \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": rpc error: code = NotFound desc = could not find container \"44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec\": container with ID starting with 
44f7c2d1406d37125aa043cc1d68694ab6f2ac83452fad13bcffa883ed8870ec not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572279 4636 scope.go:117] "RemoveContainer" containerID="8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572490 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3"} err="failed to get container status \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": rpc error: code = NotFound desc = could not find container \"8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3\": container with ID starting with 8f25a448e1897caed35fd909df6182be17980d5d8605bf259f316e3132a18ee3 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572512 4636 scope.go:117] "RemoveContainer" containerID="4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572703 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f"} err="failed to get container status \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": rpc error: code = NotFound desc = could not find container \"4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f\": container with ID starting with 4369f2b244a98095a14444aea5a7fcf5b3d4b5daff096c3a595196c6b5df738f not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572723 4636 scope.go:117] "RemoveContainer" containerID="3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572945 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850"} err="failed to get container status \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": rpc error: code = NotFound desc = could not find container \"3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850\": container with ID starting with 3b03691385e895bbc822684cbed64031a37fe63d3e10a75c3c9a75726f5aa850 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.572968 4636 scope.go:117] "RemoveContainer" containerID="a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.573157 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71"} err="failed to get container status \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": rpc error: code = NotFound desc = could not find container \"a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71\": container with ID starting with a3bae7cad422961bb9da3eb5588fbe580158350bd63512f6cfabcbc1570e5d71 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.573186 4636 scope.go:117] "RemoveContainer" containerID="ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.573372 4636 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814"} err="failed to get container status \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": rpc error: code = NotFound desc = could not find container \"ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814\": container with ID starting with ad0606ee7c04f5de1b262394760d86bcc8b5f685aa8b48322488e12a8faec814 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.573393 4636 scope.go:117] "RemoveContainer" containerID="8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.573570 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701"} err="failed to get container status \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": rpc error: code = NotFound desc = could not find container \"8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701\": container with ID starting with 8f33b4fc6fda640a607402e0eb5aafcca065f80b4f170c252c01d5098b47a701 not found: ID does not exist" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.623453 4636 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.675864 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7qm8"] Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.679289 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7qm8"] Oct 02 21:35:19 crc kubenswrapper[4636]: I1002 21:35:19.712311 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:20 crc kubenswrapper[4636]: I1002 21:35:20.360577 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/2.log" Oct 02 21:35:20 crc kubenswrapper[4636]: I1002 21:35:20.361919 4636 generic.go:334] "Generic (PLEG): container finished" podID="4c683b39-cebc-4987-a9fd-2c0fa700425f" containerID="a1142a2767c0bd7614fdeab3083ecb942db50005357c91d70928bbb27f2f41d8" exitCode=0 Oct 02 21:35:20 crc kubenswrapper[4636]: I1002 21:35:20.361998 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerDied","Data":"a1142a2767c0bd7614fdeab3083ecb942db50005357c91d70928bbb27f2f41d8"} Oct 02 21:35:20 crc kubenswrapper[4636]: I1002 21:35:20.362025 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"d452e4b7845cc34c507b658a57b61045d1f76ed099fda3e66bed65f8f4479a16"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.374017 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"76c91b5b238ca1b8a5b2054848cf746999b3222d8d11f55cc4496ae126966790"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.374444 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"53356cbc0c07a0c4f95892240306abab646e7ac848fb9ad81c15923db0c5fb29"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.374468 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"da4ed648cecc4bae2afa4d4c4812edb5dc0a8c4d7cb06654ff9e8615e33c57f5"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.374485 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"5636cdd9681576dc9466f179c6727467d37733c343eea86edf55152f935958ab"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.374501 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"dc48ae0c2b5d34a8c9ea87d118e2bebe1ea16597b4c069d49fda6d4848004eff"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.374517 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"cb3c5c8b678de6e6df9311ebcb37b7a9301261a9291995a35b9cf26a877a6909"} Oct 02 21:35:21 crc kubenswrapper[4636]: I1002 21:35:21.612518 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db86cff1-cf8b-4043-bbd7-c41ea2b72ad6" path="/var/lib/kubelet/pods/db86cff1-cf8b-4043-bbd7-c41ea2b72ad6/volumes" Oct 02 21:35:23 crc kubenswrapper[4636]: I1002 21:35:23.117802 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:35:23 crc kubenswrapper[4636]: I1002 21:35:23.118274 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:35:23 crc kubenswrapper[4636]: I1002 21:35:23.389136 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"b8107d68c1b30db71280c9726e76de07193cb64483f13adb6db3393f35f05c66"} Oct 02 21:35:26 crc kubenswrapper[4636]: I1002 21:35:26.416275 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" event={"ID":"4c683b39-cebc-4987-a9fd-2c0fa700425f","Type":"ContainerStarted","Data":"47babe6cd4b25afba77726bb358487ce9e4f45d6a68866a743073649de44e3c9"} Oct 02 21:35:26 crc kubenswrapper[4636]: I1002 21:35:26.416991 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:26 crc kubenswrapper[4636]: I1002 21:35:26.417011 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:26 crc kubenswrapper[4636]: I1002 21:35:26.445780 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" podStartSLOduration=7.445745254 podStartE2EDuration="7.445745254s" podCreationTimestamp="2025-10-02 21:35:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:35:26.443072681 +0000 UTC m=+717.766080730" watchObservedRunningTime="2025-10-02 21:35:26.445745254 +0000 UTC m=+717.768753273" Oct 02 21:35:26 crc kubenswrapper[4636]: I1002 21:35:26.449897 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:27 crc kubenswrapper[4636]: I1002 21:35:27.420177 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:27 crc kubenswrapper[4636]: I1002 21:35:27.444140 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:30 crc kubenswrapper[4636]: I1002 21:35:30.604047 4636 scope.go:117] "RemoveContainer" containerID="1a2ea439df06f9806eab9afc3c9eafa2569042c5f9505062da564e81d1d281ff" Oct 02 21:35:30 crc kubenswrapper[4636]: E1002 21:35:30.605394 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-895mm_openshift-multus(3a64b152-90d7-4dd0-be73-17e987476a1c)\"" pod="openshift-multus/multus-895mm" podUID="3a64b152-90d7-4dd0-be73-17e987476a1c" Oct 02 21:35:42 crc kubenswrapper[4636]: I1002 21:35:42.603953 4636 scope.go:117] "RemoveContainer" containerID="1a2ea439df06f9806eab9afc3c9eafa2569042c5f9505062da564e81d1d281ff" Oct 02 21:35:43 crc kubenswrapper[4636]: I1002 21:35:43.516175 4636 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-895mm_3a64b152-90d7-4dd0-be73-17e987476a1c/kube-multus/2.log" Oct 02 21:35:43 crc kubenswrapper[4636]: I1002 21:35:43.517046 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-895mm" event={"ID":"3a64b152-90d7-4dd0-be73-17e987476a1c","Type":"ContainerStarted","Data":"95c87f53c0d87ad198d66d92c5ae89e154b3f32290026485f05010a701e4880a"} Oct 02 21:35:49 crc kubenswrapper[4636]: I1002 21:35:49.751628 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6sf9w" Oct 02 21:35:53 crc kubenswrapper[4636]: I1002 21:35:53.117929 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:35:53 crc kubenswrapper[4636]: I1002 21:35:53.118448 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.863512 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx"] Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.868437 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.876590 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.892615 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx"] Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.996424 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drtfr\" (UniqueName: \"kubernetes.io/projected/65a4377d-c810-430b-ba89-cd89d5f68250-kube-api-access-drtfr\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.996493 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:00 crc kubenswrapper[4636]: I1002 21:36:00.996515 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: 
\"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.098089 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.098510 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.098570 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drtfr\" (UniqueName: \"kubernetes.io/projected/65a4377d-c810-430b-ba89-cd89d5f68250-kube-api-access-drtfr\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.098835 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.099571 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.121839 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drtfr\" (UniqueName: \"kubernetes.io/projected/65a4377d-c810-430b-ba89-cd89d5f68250-kube-api-access-drtfr\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.190367 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:01 crc kubenswrapper[4636]: I1002 21:36:01.620844 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx"] Oct 02 21:36:02 crc kubenswrapper[4636]: I1002 21:36:02.628179 4636 generic.go:334] "Generic (PLEG): container finished" podID="65a4377d-c810-430b-ba89-cd89d5f68250" containerID="472e621834fa1b21e61c1d3f5dc94d2abdad6b8179363fb2b8cae1d9791548f8" exitCode=0 Oct 02 21:36:02 crc kubenswrapper[4636]: I1002 21:36:02.629311 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" event={"ID":"65a4377d-c810-430b-ba89-cd89d5f68250","Type":"ContainerDied","Data":"472e621834fa1b21e61c1d3f5dc94d2abdad6b8179363fb2b8cae1d9791548f8"} Oct 02 21:36:02 crc kubenswrapper[4636]: I1002 21:36:02.629387 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" event={"ID":"65a4377d-c810-430b-ba89-cd89d5f68250","Type":"ContainerStarted","Data":"1d51b52997fd933b3e90621b05fe3bd5a3372e03e188b5e519735c2abfd2945a"} Oct 02 21:36:04 crc kubenswrapper[4636]: I1002 21:36:04.642912 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" event={"ID":"65a4377d-c810-430b-ba89-cd89d5f68250","Type":"ContainerDied","Data":"b935ea079e158263bc77e22f8f62c6c929ebd0e34d201c763e714cdab7122b31"} Oct 02 21:36:04 crc kubenswrapper[4636]: I1002 21:36:04.642922 4636 generic.go:334] "Generic (PLEG): container finished" podID="65a4377d-c810-430b-ba89-cd89d5f68250" containerID="b935ea079e158263bc77e22f8f62c6c929ebd0e34d201c763e714cdab7122b31" exitCode=0 Oct 02 21:36:05 crc kubenswrapper[4636]: I1002 21:36:05.650680 4636 generic.go:334] "Generic (PLEG): container finished" podID="65a4377d-c810-430b-ba89-cd89d5f68250" containerID="fc12e412faba0a2427b917ca4b8313d1d3bf53dbfe5d2b9f6280097231c8adbf" exitCode=0 Oct 02 21:36:05 crc kubenswrapper[4636]: I1002 21:36:05.650718 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" event={"ID":"65a4377d-c810-430b-ba89-cd89d5f68250","Type":"ContainerDied","Data":"fc12e412faba0a2427b917ca4b8313d1d3bf53dbfe5d2b9f6280097231c8adbf"} Oct 02 21:36:06 crc kubenswrapper[4636]: I1002 21:36:06.877929 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.079989 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-bundle\") pod \"65a4377d-c810-430b-ba89-cd89d5f68250\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.080047 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-util\") pod \"65a4377d-c810-430b-ba89-cd89d5f68250\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.080116 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drtfr\" (UniqueName: \"kubernetes.io/projected/65a4377d-c810-430b-ba89-cd89d5f68250-kube-api-access-drtfr\") pod \"65a4377d-c810-430b-ba89-cd89d5f68250\" (UID: \"65a4377d-c810-430b-ba89-cd89d5f68250\") " Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.080914 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-bundle" (OuterVolumeSpecName: "bundle") pod "65a4377d-c810-430b-ba89-cd89d5f68250" (UID: "65a4377d-c810-430b-ba89-cd89d5f68250"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.081053 4636 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.084646 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a4377d-c810-430b-ba89-cd89d5f68250-kube-api-access-drtfr" (OuterVolumeSpecName: "kube-api-access-drtfr") pod "65a4377d-c810-430b-ba89-cd89d5f68250" (UID: "65a4377d-c810-430b-ba89-cd89d5f68250"). InnerVolumeSpecName "kube-api-access-drtfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.111683 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-util" (OuterVolumeSpecName: "util") pod "65a4377d-c810-430b-ba89-cd89d5f68250" (UID: "65a4377d-c810-430b-ba89-cd89d5f68250"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.182873 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drtfr\" (UniqueName: \"kubernetes.io/projected/65a4377d-c810-430b-ba89-cd89d5f68250-kube-api-access-drtfr\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.182951 4636 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/65a4377d-c810-430b-ba89-cd89d5f68250-util\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.663954 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" event={"ID":"65a4377d-c810-430b-ba89-cd89d5f68250","Type":"ContainerDied","Data":"1d51b52997fd933b3e90621b05fe3bd5a3372e03e188b5e519735c2abfd2945a"} Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.664023 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d51b52997fd933b3e90621b05fe3bd5a3372e03e188b5e519735c2abfd2945a" Oct 02 21:36:07 crc kubenswrapper[4636]: I1002 21:36:07.664047 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.719531 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c"] Oct 02 21:36:08 crc kubenswrapper[4636]: E1002 21:36:08.719979 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="extract" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.719991 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="extract" Oct 02 21:36:08 crc kubenswrapper[4636]: E1002 21:36:08.720002 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="util" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.720008 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="util" Oct 02 21:36:08 crc kubenswrapper[4636]: E1002 21:36:08.720029 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="pull" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.720042 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="pull" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.720128 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a4377d-c810-430b-ba89-cd89d5f68250" containerName="extract" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.720478 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.722171 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.723050 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-lf2km" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.726588 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.733508 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c"] Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.802199 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kx8s\" (UniqueName: \"kubernetes.io/projected/51900dd3-88ff-4e2a-bda7-fa8350f5f2d7-kube-api-access-6kx8s\") pod \"nmstate-operator-858ddd8f98-6qt2c\" (UID: \"51900dd3-88ff-4e2a-bda7-fa8350f5f2d7\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.903748 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kx8s\" (UniqueName: \"kubernetes.io/projected/51900dd3-88ff-4e2a-bda7-fa8350f5f2d7-kube-api-access-6kx8s\") pod \"nmstate-operator-858ddd8f98-6qt2c\" (UID: \"51900dd3-88ff-4e2a-bda7-fa8350f5f2d7\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" Oct 02 21:36:08 crc kubenswrapper[4636]: I1002 21:36:08.922499 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kx8s\" (UniqueName: \"kubernetes.io/projected/51900dd3-88ff-4e2a-bda7-fa8350f5f2d7-kube-api-access-6kx8s\") pod \"nmstate-operator-858ddd8f98-6qt2c\" (UID: \"51900dd3-88ff-4e2a-bda7-fa8350f5f2d7\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" Oct 02 21:36:09 crc kubenswrapper[4636]: I1002 21:36:09.050203 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" Oct 02 21:36:09 crc kubenswrapper[4636]: I1002 21:36:09.511061 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c"] Oct 02 21:36:09 crc kubenswrapper[4636]: I1002 21:36:09.673896 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" event={"ID":"51900dd3-88ff-4e2a-bda7-fa8350f5f2d7","Type":"ContainerStarted","Data":"6f086511fdc983d429a5af64f1dff1dd96a8530a12a2d854110bc503d3178d54"} Oct 02 21:36:10 crc kubenswrapper[4636]: I1002 21:36:10.673892 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rklgn"] Oct 02 21:36:10 crc kubenswrapper[4636]: I1002 21:36:10.675485 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" podUID="ffb6abfd-3376-4908-b388-7c398e36f986" containerName="controller-manager" containerID="cri-o://26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d" gracePeriod=30 Oct 02 21:36:10 crc kubenswrapper[4636]: I1002 21:36:10.761576 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"] Oct 02 21:36:10 crc kubenswrapper[4636]: I1002 21:36:10.761828 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" containerID="cri-o://c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b" gracePeriod=30 Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.076572 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.132661 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-proxy-ca-bundles\") pod \"ffb6abfd-3376-4908-b388-7c398e36f986\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.132715 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6g99\" (UniqueName: \"kubernetes.io/projected/ffb6abfd-3376-4908-b388-7c398e36f986-kube-api-access-x6g99\") pod \"ffb6abfd-3376-4908-b388-7c398e36f986\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.132738 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-config\") pod \"ffb6abfd-3376-4908-b388-7c398e36f986\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.132790 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffb6abfd-3376-4908-b388-7c398e36f986-serving-cert\") pod \"ffb6abfd-3376-4908-b388-7c398e36f986\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.132812 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-client-ca\") pod \"ffb6abfd-3376-4908-b388-7c398e36f986\" (UID: \"ffb6abfd-3376-4908-b388-7c398e36f986\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.133914 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ffb6abfd-3376-4908-b388-7c398e36f986" (UID: "ffb6abfd-3376-4908-b388-7c398e36f986"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.134463 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-config" (OuterVolumeSpecName: "config") pod "ffb6abfd-3376-4908-b388-7c398e36f986" (UID: "ffb6abfd-3376-4908-b388-7c398e36f986"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.137101 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-client-ca" (OuterVolumeSpecName: "client-ca") pod "ffb6abfd-3376-4908-b388-7c398e36f986" (UID: "ffb6abfd-3376-4908-b388-7c398e36f986"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.142191 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffb6abfd-3376-4908-b388-7c398e36f986-kube-api-access-x6g99" (OuterVolumeSpecName: "kube-api-access-x6g99") pod "ffb6abfd-3376-4908-b388-7c398e36f986" (UID: "ffb6abfd-3376-4908-b388-7c398e36f986"). InnerVolumeSpecName "kube-api-access-x6g99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.144795 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb6abfd-3376-4908-b388-7c398e36f986-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ffb6abfd-3376-4908-b388-7c398e36f986" (UID: "ffb6abfd-3376-4908-b388-7c398e36f986"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.186406 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz"] Oct 02 21:36:11 crc kubenswrapper[4636]: E1002 21:36:11.186660 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb6abfd-3376-4908-b388-7c398e36f986" containerName="controller-manager" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.186673 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb6abfd-3376-4908-b388-7c398e36f986" containerName="controller-manager" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.186789 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb6abfd-3376-4908-b388-7c398e36f986" containerName="controller-manager" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.187205 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.203288 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz"] Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233784 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdp6d\" (UniqueName: \"kubernetes.io/projected/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-kube-api-access-kdp6d\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233828 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-serving-cert\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233849 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-client-ca\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233874 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-proxy-ca-bundles\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233914 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-config\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233952 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6g99\" (UniqueName: \"kubernetes.io/projected/ffb6abfd-3376-4908-b388-7c398e36f986-kube-api-access-x6g99\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233963 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233971 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffb6abfd-3376-4908-b388-7c398e36f986-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233979 4636 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.233989 4636 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ffb6abfd-3376-4908-b388-7c398e36f986-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.242006 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.335035 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2766a44-24c7-48ff-943f-3a225eb74dec-serving-cert\") pod \"b2766a44-24c7-48ff-943f-3a225eb74dec\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.335085 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-config\") pod \"b2766a44-24c7-48ff-943f-3a225eb74dec\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.335104 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-client-ca\") pod \"b2766a44-24c7-48ff-943f-3a225eb74dec\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.335922 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzd7b\" (UniqueName: \"kubernetes.io/projected/b2766a44-24c7-48ff-943f-3a225eb74dec-kube-api-access-bzd7b\") pod \"b2766a44-24c7-48ff-943f-3a225eb74dec\" (UID: \"b2766a44-24c7-48ff-943f-3a225eb74dec\") " Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.335815 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-client-ca" (OuterVolumeSpecName: "client-ca") pod 
"b2766a44-24c7-48ff-943f-3a225eb74dec" (UID: "b2766a44-24c7-48ff-943f-3a225eb74dec"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.335952 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-config" (OuterVolumeSpecName: "config") pod "b2766a44-24c7-48ff-943f-3a225eb74dec" (UID: "b2766a44-24c7-48ff-943f-3a225eb74dec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336096 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-config\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336165 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdp6d\" (UniqueName: \"kubernetes.io/projected/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-kube-api-access-kdp6d\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336196 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-serving-cert\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336222 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-client-ca\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336257 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-proxy-ca-bundles\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336310 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.336325 4636 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b2766a44-24c7-48ff-943f-3a225eb74dec-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.337262 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-proxy-ca-bundles\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " 
pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.339386 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2766a44-24c7-48ff-943f-3a225eb74dec-kube-api-access-bzd7b" (OuterVolumeSpecName: "kube-api-access-bzd7b") pod "b2766a44-24c7-48ff-943f-3a225eb74dec" (UID: "b2766a44-24c7-48ff-943f-3a225eb74dec"). InnerVolumeSpecName "kube-api-access-bzd7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.339470 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-client-ca\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.339741 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-config\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.341325 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2766a44-24c7-48ff-943f-3a225eb74dec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b2766a44-24c7-48ff-943f-3a225eb74dec" (UID: "b2766a44-24c7-48ff-943f-3a225eb74dec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.341953 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-serving-cert\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.358433 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdp6d\" (UniqueName: \"kubernetes.io/projected/74590a19-b0ab-4ab8-83a6-870c9b8eb13a-kube-api-access-kdp6d\") pod \"controller-manager-6f5786bbc7-jgfwz\" (UID: \"74590a19-b0ab-4ab8-83a6-870c9b8eb13a\") " pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.437586 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzd7b\" (UniqueName: \"kubernetes.io/projected/b2766a44-24c7-48ff-943f-3a225eb74dec-kube-api-access-bzd7b\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.437622 4636 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2766a44-24c7-48ff-943f-3a225eb74dec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.517595 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.689419 4636 generic.go:334] "Generic (PLEG): container finished" podID="ffb6abfd-3376-4908-b388-7c398e36f986" containerID="26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d" exitCode=0 Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.689469 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.689503 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" event={"ID":"ffb6abfd-3376-4908-b388-7c398e36f986","Type":"ContainerDied","Data":"26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d"} Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.689958 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rklgn" event={"ID":"ffb6abfd-3376-4908-b388-7c398e36f986","Type":"ContainerDied","Data":"d215aef1ee352044d2bec54b724de00649e5f5d99e021416e8e4b23e406adf6f"} Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.689977 4636 scope.go:117] "RemoveContainer" containerID="26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.708154 4636 generic.go:334] "Generic (PLEG): container finished" podID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerID="c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b" exitCode=0 Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.708191 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" event={"ID":"b2766a44-24c7-48ff-943f-3a225eb74dec","Type":"ContainerDied","Data":"c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b"} Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.708209 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.708217 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p" event={"ID":"b2766a44-24c7-48ff-943f-3a225eb74dec","Type":"ContainerDied","Data":"51834171d1cf69c63339c42bda8a6f1ed12f45ef9fa3e94ed419be3505f2d7d1"} Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.718894 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rklgn"] Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.722242 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rklgn"] Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.731416 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"] Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.735835 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9759p"] Oct 02 21:36:11 crc kubenswrapper[4636]: I1002 21:36:11.931115 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz"] Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.183575 4636 scope.go:117] "RemoveContainer" containerID="26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d" Oct 02 21:36:12 crc kubenswrapper[4636]: E1002 21:36:12.186371 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d\": container with ID starting with 26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d not found: ID does not exist" containerID="26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.186413 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d"} err="failed to get container status \"26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d\": rpc error: code = NotFound desc = could not find container \"26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d\": container with ID starting with 26c60d64a641576becf3ff75e77985476e1955fe0a41929078a46b14e191372d not found: ID does not exist" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.186439 4636 scope.go:117] "RemoveContainer" containerID="c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.229599 4636 scope.go:117] "RemoveContainer" containerID="c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b" Oct 02 21:36:12 crc kubenswrapper[4636]: E1002 21:36:12.230105 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b\": container with ID starting with c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b not found: ID does not exist" containerID="c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.230177 
Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.230177 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b"} err="failed to get container status \"c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b\": rpc error: code = NotFound desc = could not find container \"c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b\": container with ID starting with c098f42800b30d018dd0ce51b9c01d3affbb074b2c201f72467315778bb9fe2b not found: ID does not exist" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.284925 4636 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.459603 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks"] Oct 02 21:36:12 crc kubenswrapper[4636]: E1002 21:36:12.460002 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.460015 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.460171 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" containerName="route-controller-manager" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.467826 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.467727 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks"] Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.474117 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.474256 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.474480 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.474808 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.474899 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.475044 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.553400 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c79st\" (UniqueName: \"kubernetes.io/projected/2a29f3bf-36f6-4337-99d1-8ffa3c830891-kube-api-access-c79st\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " 
pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.553471 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a29f3bf-36f6-4337-99d1-8ffa3c830891-config\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.553495 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2a29f3bf-36f6-4337-99d1-8ffa3c830891-client-ca\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.553524 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a29f3bf-36f6-4337-99d1-8ffa3c830891-serving-cert\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.654943 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a29f3bf-36f6-4337-99d1-8ffa3c830891-config\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.654983 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2a29f3bf-36f6-4337-99d1-8ffa3c830891-client-ca\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.655015 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a29f3bf-36f6-4337-99d1-8ffa3c830891-serving-cert\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.655081 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c79st\" (UniqueName: \"kubernetes.io/projected/2a29f3bf-36f6-4337-99d1-8ffa3c830891-kube-api-access-c79st\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.656089 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2a29f3bf-36f6-4337-99d1-8ffa3c830891-client-ca\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " 
pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.656254 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a29f3bf-36f6-4337-99d1-8ffa3c830891-config\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.661539 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a29f3bf-36f6-4337-99d1-8ffa3c830891-serving-cert\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.684435 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c79st\" (UniqueName: \"kubernetes.io/projected/2a29f3bf-36f6-4337-99d1-8ffa3c830891-kube-api-access-c79st\") pod \"route-controller-manager-7d9ccf9f97-b6pks\" (UID: \"2a29f3bf-36f6-4337-99d1-8ffa3c830891\") " pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.714318 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" event={"ID":"74590a19-b0ab-4ab8-83a6-870c9b8eb13a","Type":"ContainerStarted","Data":"05f7eceeb8e720231539f3e1cde28f05d4389e7b4036e844096e0153c624a249"} Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.714358 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" event={"ID":"74590a19-b0ab-4ab8-83a6-870c9b8eb13a","Type":"ContainerStarted","Data":"ce58e9414e549616b25fd64760805509e46702b7887b1c28c7395108f5b219e3"} Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.715308 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.716383 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" event={"ID":"51900dd3-88ff-4e2a-bda7-fa8350f5f2d7","Type":"ContainerStarted","Data":"32e27189cf1f31f04d1da505c2ef48af5fbc74889096f7e6c1a4cae8c340e5c1"} Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.726752 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.734365 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6f5786bbc7-jgfwz" podStartSLOduration=1.734351843 podStartE2EDuration="1.734351843s" podCreationTimestamp="2025-10-02 21:36:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:36:12.731781263 +0000 UTC m=+764.054789282" watchObservedRunningTime="2025-10-02 21:36:12.734351843 +0000 UTC m=+764.057359862"
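
The "Observed pod startup duration" records in this log carry two numbers whose relationship can be checked directly: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). Pods that pulled no image report zero-value pull timestamps (0001-01-01) and identical E2E and SLO figures, as in the controller-manager record just above. A small Go check using the nmstate-operator timestamps logged just below (wall-clock strings, so the SLO result matches the logged value only up to the ~10ns rounding between the wall and monotonic clocks):

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	t, err := time.Parse(time.RFC3339Nano, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps from the nmstate-operator-858ddd8f98-6qt2c record below.
	created := mustParse("2025-10-02T21:36:08Z")
	firstPull := mustParse("2025-10-02T21:36:09.549944818Z")
	lastPull := mustParse("2025-10-02T21:36:12.23216392Z")
	running := mustParse("2025-10-02T21:36:12.809260565Z")

	e2e := running.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: pull time excluded
	fmt.Printf("podStartE2EDuration=%s podStartSLOduration=%s\n", e2e, slo)
	// Prints 4.809260565s and ~2.127041463s, matching the logged
	// 4.809260565s / 2.127041453 up to clock rounding.
}
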
Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.802340 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:12 crc kubenswrapper[4636]: I1002 21:36:12.809284 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-6qt2c" podStartSLOduration=2.127041453 podStartE2EDuration="4.809260565s" podCreationTimestamp="2025-10-02 21:36:08 +0000 UTC" firstStartedPulling="2025-10-02 21:36:09.549944818 +0000 UTC m=+760.872952837" lastFinishedPulling="2025-10-02 21:36:12.23216392 +0000 UTC m=+763.555171949" observedRunningTime="2025-10-02 21:36:12.802999005 +0000 UTC m=+764.126007024" watchObservedRunningTime="2025-10-02 21:36:12.809260565 +0000 UTC m=+764.132268584" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.223418 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.610222 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2766a44-24c7-48ff-943f-3a225eb74dec" path="/var/lib/kubelet/pods/b2766a44-24c7-48ff-943f-3a225eb74dec/volumes" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.611113 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffb6abfd-3376-4908-b388-7c398e36f986" path="/var/lib/kubelet/pods/ffb6abfd-3376-4908-b388-7c398e36f986/volumes" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.724093 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" event={"ID":"2a29f3bf-36f6-4337-99d1-8ffa3c830891","Type":"ContainerStarted","Data":"e77f6d7d7060e6b791e13856892bf0b8aac0d9747b75a31810684942dc091cef"} Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.725180 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" event={"ID":"2a29f3bf-36f6-4337-99d1-8ffa3c830891","Type":"ContainerStarted","Data":"feb880755cc092bdab1bbc9566e7cceb1c024ee430dfced08ccb56c00eed983e"} Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.725258 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.740419 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" podStartSLOduration=2.740407754 podStartE2EDuration="2.740407754s" podCreationTimestamp="2025-10-02 21:36:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:36:13.738911843 +0000 UTC m=+765.061919862" watchObservedRunningTime="2025-10-02 21:36:13.740407754 +0000 UTC m=+765.063415773" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.798364 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.799343 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.801279 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-fbv9v" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.822113 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.826698 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.827581 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.830244 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.848532 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-snqg8"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.849535 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.859104 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.869571 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drwf9\" (UniqueName: \"kubernetes.io/projected/e03267b6-7c4a-419a-b291-aedff26bd214-kube-api-access-drwf9\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.870216 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/28949b9c-5546-47a7-bbfe-8263cdc11841-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.870242 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-dbus-socket\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.870280 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-nmstate-lock\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.870296 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-ovs-socket\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc 
kubenswrapper[4636]: I1002 21:36:13.870312 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g99k\" (UniqueName: \"kubernetes.io/projected/f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da-kube-api-access-5g99k\") pod \"nmstate-metrics-fdff9cb8d-mfcf8\" (UID: \"f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.870337 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj92x\" (UniqueName: \"kubernetes.io/projected/28949b9c-5546-47a7-bbfe-8263cdc11841-kube-api-access-lj92x\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.882078 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d9ccf9f97-b6pks" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.970989 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g99k\" (UniqueName: \"kubernetes.io/projected/f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da-kube-api-access-5g99k\") pod \"nmstate-metrics-fdff9cb8d-mfcf8\" (UID: \"f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971224 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-nmstate-lock\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971298 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-ovs-socket\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971388 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj92x\" (UniqueName: \"kubernetes.io/projected/28949b9c-5546-47a7-bbfe-8263cdc11841-kube-api-access-lj92x\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971485 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drwf9\" (UniqueName: \"kubernetes.io/projected/e03267b6-7c4a-419a-b291-aedff26bd214-kube-api-access-drwf9\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971559 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/28949b9c-5546-47a7-bbfe-8263cdc11841-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971620 4636 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-dbus-socket\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: E1002 21:36:13.971689 4636 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971354 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-ovs-socket\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: E1002 21:36:13.971765 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28949b9c-5546-47a7-bbfe-8263cdc11841-tls-key-pair podName:28949b9c-5546-47a7-bbfe-8263cdc11841 nodeName:}" failed. No retries permitted until 2025-10-02 21:36:14.471737421 +0000 UTC m=+765.794745450 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/28949b9c-5546-47a7-bbfe-8263cdc11841-tls-key-pair") pod "nmstate-webhook-6cdbc54649-nr6nl" (UID: "28949b9c-5546-47a7-bbfe-8263cdc11841") : secret "openshift-nmstate-webhook" not found Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971318 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-nmstate-lock\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.971901 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/e03267b6-7c4a-419a-b291-aedff26bd214-dbus-socket\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.975319 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn"] Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.982386 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.985016 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.985039 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 02 21:36:13 crc kubenswrapper[4636]: I1002 21:36:13.988295 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-s8vf6" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.004555 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn"] Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.022296 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drwf9\" (UniqueName: \"kubernetes.io/projected/e03267b6-7c4a-419a-b291-aedff26bd214-kube-api-access-drwf9\") pod \"nmstate-handler-snqg8\" (UID: \"e03267b6-7c4a-419a-b291-aedff26bd214\") " pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.029446 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g99k\" (UniqueName: \"kubernetes.io/projected/f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da-kube-api-access-5g99k\") pod \"nmstate-metrics-fdff9cb8d-mfcf8\" (UID: \"f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.059525 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj92x\" (UniqueName: \"kubernetes.io/projected/28949b9c-5546-47a7-bbfe-8263cdc11841-kube-api-access-lj92x\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.072555 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzc49\" (UniqueName: \"kubernetes.io/projected/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-kube-api-access-gzc49\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.072814 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.072953 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.111201 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.161349 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.184369 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzc49\" (UniqueName: \"kubernetes.io/projected/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-kube-api-access-gzc49\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.184425 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.184476 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: E1002 21:36:14.185157 4636 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 02 21:36:14 crc kubenswrapper[4636]: E1002 21:36:14.185287 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-plugin-serving-cert podName:550f107b-cd3d-4c84-a0cb-edf8e0c62db8 nodeName:}" failed. No retries permitted until 2025-10-02 21:36:14.685267743 +0000 UTC m=+766.008275762 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-6c8fn" (UID: "550f107b-cd3d-4c84-a0cb-edf8e0c62db8") : secret "plugin-serving-cert" not found Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.185360 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.211618 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzc49\" (UniqueName: \"kubernetes.io/projected/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-kube-api-access-gzc49\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.488492 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/28949b9c-5546-47a7-bbfe-8263cdc11841-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.496068 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/28949b9c-5546-47a7-bbfe-8263cdc11841-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-nr6nl\" (UID: \"28949b9c-5546-47a7-bbfe-8263cdc11841\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.615856 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8"] Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.691425 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.695046 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/550f107b-cd3d-4c84-a0cb-edf8e0c62db8-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-6c8fn\" (UID: \"550f107b-cd3d-4c84-a0cb-edf8e0c62db8\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.730491 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-snqg8" event={"ID":"e03267b6-7c4a-419a-b291-aedff26bd214","Type":"ContainerStarted","Data":"51685ea8aca56d968fb5661390de76a5686e1087e6489fdca86dc39cd052f89d"}
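
The failed tls-key-pair and plugin-serving-cert mounts above show the volume manager's retry discipline: a MountVolume.SetUp that fails because a secret does not exist yet is not fatal, it is re-queued with a delay ("durationBeforeRetry 500ms", growing on repeated failures), and the identical operation succeeds once the serving-cert controller has published the secret. The log bears this out: tls-key-pair fails at 21:36:13.971, is eligible to retry at 21:36:14.471, and succeeds at 21:36:14.496. A hypothetical sketch of that retry-with-backoff shape (fetchSecret, the ready channel, and the doubling policy are illustrative assumptions, not the kubelet's exact implementation):

package main

import (
	"errors"
	"fmt"
	"time"
)

var errSecretNotFound = errors.New(`secret "plugin-serving-cert" not found`)

// fetchSecret fails until the serving-cert controller has written the secret.
func fetchSecret(ready <-chan struct{}) error {
	select {
	case <-ready:
		return nil
	default:
		return errSecretNotFound
	}
}

func main() {
	ready := make(chan struct{})
	time.AfterFunc(1200*time.Millisecond, func() { close(ready) }) // secret appears later

	delay := 500 * time.Millisecond // the durationBeforeRetry seen in the log
	for attempt := 1; ; attempt++ {
		if err := fetchSecret(ready); err != nil {
			fmt.Printf("attempt %d: MountVolume.SetUp failed: %v; retrying in %v\n", attempt, err, delay)
			time.Sleep(delay)
			delay *= 2 // back off between retries instead of hot-looping
			continue
		}
		fmt.Printf("attempt %d: MountVolume.SetUp succeeded\n", attempt)
		return
	}
}
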
Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.732207 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" event={"ID":"f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da","Type":"ContainerStarted","Data":"c4e3096713440a8f52b45f7d78b1836056b7851c2c3bb1b6c2048b335fcdf478"} Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.743133 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.849163 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-86f477f8bf-wr9fv"] Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.851470 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.863869 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-86f477f8bf-wr9fv"] Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.893862 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjm22\" (UniqueName: \"kubernetes.io/projected/8945ed7c-679e-42c2-adc9-6de357afe0fe-kube-api-access-fjm22\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894165 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-service-ca\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894250 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-oauth-serving-cert\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894334 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-oauth-config\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894397 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-trusted-ca-bundle\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894464 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-serving-cert\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894529 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-config\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:14 crc kubenswrapper[4636]: I1002 21:36:14.894722 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028163 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-service-ca\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028203 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-oauth-serving-cert\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028244 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-oauth-config\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028261 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-trusted-ca-bundle\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028281 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-serving-cert\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028299 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-config\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.028323 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjm22\" (UniqueName: \"kubernetes.io/projected/8945ed7c-679e-42c2-adc9-6de357afe0fe-kube-api-access-fjm22\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.029432 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-service-ca\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 
02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.030004 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-oauth-serving-cert\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.033095 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-oauth-config\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.033211 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-serving-cert\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.033614 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-console-config\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.034486 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8945ed7c-679e-42c2-adc9-6de357afe0fe-trusted-ca-bundle\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.051468 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjm22\" (UniqueName: \"kubernetes.io/projected/8945ed7c-679e-42c2-adc9-6de357afe0fe-kube-api-access-fjm22\") pod \"console-86f477f8bf-wr9fv\" (UID: \"8945ed7c-679e-42c2-adc9-6de357afe0fe\") " pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.150350 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl"] Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.186385 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.349396 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn"] Oct 02 21:36:15 crc kubenswrapper[4636]: W1002 21:36:15.354620 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod550f107b_cd3d_4c84_a0cb_edf8e0c62db8.slice/crio-cd7c1bcc4549dd656d14ab96a141675eebb7db65bd9dd590a127cba211c8fb81 WatchSource:0}: Error finding container cd7c1bcc4549dd656d14ab96a141675eebb7db65bd9dd590a127cba211c8fb81: Status 404 returned error can't find the container with id cd7c1bcc4549dd656d14ab96a141675eebb7db65bd9dd590a127cba211c8fb81 Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.594495 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-86f477f8bf-wr9fv"] Oct 02 21:36:15 crc kubenswrapper[4636]: W1002 21:36:15.595664 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8945ed7c_679e_42c2_adc9_6de357afe0fe.slice/crio-4b7eb26b3c4a694484bc8c4e0babe9e6d4bc7b8ac33e4b5b095afcc0d96dd7f0 WatchSource:0}: Error finding container 4b7eb26b3c4a694484bc8c4e0babe9e6d4bc7b8ac33e4b5b095afcc0d96dd7f0: Status 404 returned error can't find the container with id 4b7eb26b3c4a694484bc8c4e0babe9e6d4bc7b8ac33e4b5b095afcc0d96dd7f0 Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.737766 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86f477f8bf-wr9fv" event={"ID":"8945ed7c-679e-42c2-adc9-6de357afe0fe","Type":"ContainerStarted","Data":"25b6cefb720da565694d5e46fa71070b6fbc4e10fecb7802028419833b224c7d"} Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.738107 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86f477f8bf-wr9fv" event={"ID":"8945ed7c-679e-42c2-adc9-6de357afe0fe","Type":"ContainerStarted","Data":"4b7eb26b3c4a694484bc8c4e0babe9e6d4bc7b8ac33e4b5b095afcc0d96dd7f0"} Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.738562 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" event={"ID":"28949b9c-5546-47a7-bbfe-8263cdc11841","Type":"ContainerStarted","Data":"07e7f90c81e4c5536933d6b330b2bb0611bef594ed30fe3429911f01f026091b"} Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.739219 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" event={"ID":"550f107b-cd3d-4c84-a0cb-edf8e0c62db8","Type":"ContainerStarted","Data":"cd7c1bcc4549dd656d14ab96a141675eebb7db65bd9dd590a127cba211c8fb81"} Oct 02 21:36:15 crc kubenswrapper[4636]: I1002 21:36:15.762250 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-86f477f8bf-wr9fv" podStartSLOduration=1.76223544 podStartE2EDuration="1.76223544s" podCreationTimestamp="2025-10-02 21:36:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:36:15.760437271 +0000 UTC m=+767.083445310" watchObservedRunningTime="2025-10-02 21:36:15.76223544 +0000 UTC m=+767.085243459"
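
The bursts of "SyncLoop (PLEG): event for pod ... ContainerStarted" records here, and the "Generic (PLEG): container finished" lines elsewhere in this log, come from the generic PLEG pattern: periodically relist container states from the runtime, diff the result against the previous snapshot, and turn each transition into a lifecycle event for the sync loop. A toy Go sketch of that diff (the real PLEG also tracks sandboxes and more states; these types are simplified stand-ins):

package main

import "fmt"

type state string

const (
	running state = "running"
	exited  state = "exited"
)

type event struct {
	containerID string
	kind        string // "ContainerStarted" or "ContainerDied"
}

// relist diffs two snapshots of per-container state and derives events,
// the way a generic PLEG turns polling into ContainerStarted/ContainerDied.
func relist(prev, curr map[string]state) []event {
	var events []event
	for id, s := range curr {
		old, seen := prev[id]
		switch {
		case s == running && (!seen || old != running):
			events = append(events, event{id, "ContainerStarted"})
		case s == exited && seen && old == running:
			events = append(events, event{id, "ContainerDied"})
		}
	}
	return events
}

func main() {
	prev := map[string]state{"0fb23c577731": running}
	curr := map[string]state{"0fb23c577731": running, "714a74d365b6": running}
	for _, e := range relist(prev, curr) {
		fmt.Printf("SyncLoop (PLEG): %s %s\n", e.kind, e.containerID)
	}
}

Polling also explains the "Failed to process watch event ... Status 404" warnings just above: the cgroup watcher can observe a container directory before (or after) the runtime can answer for that ID, and the transient 404 is tolerated rather than treated as an error.
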
Oct 02 21:36:17 crc kubenswrapper[4636]: I1002 21:36:17.751796 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" event={"ID":"28949b9c-5546-47a7-bbfe-8263cdc11841","Type":"ContainerStarted","Data":"0fb23c577731a94738404cf07e69e94d4fe7c72a7071dddbfec54cef13a9c70e"} Oct 02 21:36:17 crc kubenswrapper[4636]: I1002 21:36:17.752271 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:17 crc kubenswrapper[4636]: I1002 21:36:17.755808 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" event={"ID":"f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da","Type":"ContainerStarted","Data":"714a74d365b6500505ed00af09801e58966fc505170efa9c2825dfc64a7ae393"} Oct 02 21:36:18 crc kubenswrapper[4636]: I1002 21:36:18.767729 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-snqg8" event={"ID":"e03267b6-7c4a-419a-b291-aedff26bd214","Type":"ContainerStarted","Data":"9e6b9548543f650617eeb73033b4074bdba749b8fd0f09ae374e3dda500a20c4"} Oct 02 21:36:18 crc kubenswrapper[4636]: I1002 21:36:18.768110 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:18 crc kubenswrapper[4636]: I1002 21:36:18.789470 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-snqg8" podStartSLOduration=2.490828981 podStartE2EDuration="5.78945601s" podCreationTimestamp="2025-10-02 21:36:13 +0000 UTC" firstStartedPulling="2025-10-02 21:36:14.207477999 +0000 UTC m=+765.530486018" lastFinishedPulling="2025-10-02 21:36:17.506105028 +0000 UTC m=+768.829113047" observedRunningTime="2025-10-02 21:36:18.787926258 +0000 UTC m=+770.110934287" watchObservedRunningTime="2025-10-02 21:36:18.78945601 +0000 UTC m=+770.112464029" Oct 02 21:36:18 crc kubenswrapper[4636]: I1002 21:36:18.794116 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" podStartSLOduration=3.450984901 podStartE2EDuration="5.794084436s" podCreationTimestamp="2025-10-02 21:36:13 +0000 UTC" firstStartedPulling="2025-10-02 21:36:15.163019053 +0000 UTC m=+766.486027072" lastFinishedPulling="2025-10-02 21:36:17.506118588 +0000 UTC m=+768.829126607" observedRunningTime="2025-10-02 21:36:17.77059729 +0000 UTC m=+769.093605309" watchObservedRunningTime="2025-10-02 21:36:18.794084436 +0000 UTC m=+770.117092465" Oct 02 21:36:19 crc kubenswrapper[4636]: I1002 21:36:19.777568 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" event={"ID":"550f107b-cd3d-4c84-a0cb-edf8e0c62db8","Type":"ContainerStarted","Data":"7d21ea47570fdc2fdf69b29468c267af109640cd52d8dbb10e3ad362880de00c"} Oct 02 21:36:19 crc kubenswrapper[4636]: I1002 21:36:19.795605 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-6c8fn" podStartSLOduration=3.515697634 podStartE2EDuration="6.795585612s" podCreationTimestamp="2025-10-02 21:36:13 +0000 UTC" firstStartedPulling="2025-10-02 21:36:15.356981441 +0000 UTC m=+766.679989460" lastFinishedPulling="2025-10-02 21:36:18.636869419 +0000 UTC m=+769.959877438" observedRunningTime="2025-10-02 21:36:19.791699946 +0000 UTC m=+771.114707965" watchObservedRunningTime="2025-10-02 21:36:19.795585612 +0000 UTC m=+771.118593631" Oct 02 21:36:20 crc kubenswrapper[4636]: I1002 21:36:20.785832 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" event={"ID":"f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da","Type":"ContainerStarted","Data":"0743b8dd8b4749b1958a8138ed578a7232742898230cae9ff283a17e28c9316d"} Oct 02 21:36:20 crc kubenswrapper[4636]: I1002 21:36:20.814544 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-mfcf8" podStartSLOduration=2.316297242 podStartE2EDuration="7.814515384s" podCreationTimestamp="2025-10-02 21:36:13 +0000 UTC" firstStartedPulling="2025-10-02 21:36:14.62708207 +0000 UTC m=+765.950090099" lastFinishedPulling="2025-10-02 21:36:20.125300222 +0000 UTC m=+771.448308241" observedRunningTime="2025-10-02 21:36:20.810531295 +0000 UTC m=+772.133539394" watchObservedRunningTime="2025-10-02 21:36:20.814515384 +0000 UTC m=+772.137523433" Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.118154 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.118627 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.118693 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.119819 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab44b8ce55b9d49cf042f8f75e697442d8007223e48a516b488f1007f0a6409a"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.119949 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://ab44b8ce55b9d49cf042f8f75e697442d8007223e48a516b488f1007f0a6409a" gracePeriod=600 Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.813860 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="ab44b8ce55b9d49cf042f8f75e697442d8007223e48a516b488f1007f0a6409a" exitCode=0 Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.814045 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"ab44b8ce55b9d49cf042f8f75e697442d8007223e48a516b488f1007f0a6409a"} Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.814300 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"9f0c7c91411fb3c1501fae8b5053b828ebe95e83f4048c87988a5b7f03a27fd8"}
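
The machine-config-daemon sequence above is the standard probe-driven restart pipeline: an HTTP GET against http://127.0.0.1:8798/health gets "connection refused", the prober reports the liveness failure, the kubelet marks the container for restart, kills it with its grace period (gracePeriod=600), and PLEG then reports ContainerDied followed by ContainerStarted for the replacement. A compact sketch of the probe decision itself (the failureThreshold value and the absence of a probe period are simplifications; the real values come from the container's probe spec):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce mirrors what the log shows: a failed TCP dial ("connection
// refused") counts as a probe failure, as does an error-class HTTP status.
func probeOnce(url string) bool {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return false
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 400
}

func main() {
	const failureThreshold = 3 // assumed; configured per container in the pod spec
	consecutive := 0
	for consecutive < failureThreshold {
		if probeOnce("http://127.0.0.1:8798/health") {
			consecutive = 0 // a success resets the failure streak
			break
		}
		consecutive++ // periodSeconds between attempts omitted for brevity
	}
	if consecutive >= failureThreshold {
		fmt.Println("liveness unhealthy: kill container with grace period, then restart")
	}
}

Note that the old container exits with exitCode=0 here because it was killed deliberately; the restart is driven by the probe verdict, not by a crash.
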
Oct 02 21:36:23 crc kubenswrapper[4636]: I1002 21:36:23.814327 4636 scope.go:117] "RemoveContainer" containerID="29a2a6f09d5129082985273f8dec3b68fd722674f2b01916004ca2855f54c75b" Oct 02 21:36:24 crc kubenswrapper[4636]: I1002 21:36:24.197427 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-snqg8" Oct 02 21:36:25 crc kubenswrapper[4636]: I1002 21:36:25.186944 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:25 crc kubenswrapper[4636]: I1002 21:36:25.187552 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:25 crc kubenswrapper[4636]: I1002 21:36:25.193945 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:25 crc kubenswrapper[4636]: I1002 21:36:25.833164 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-86f477f8bf-wr9fv" Oct 02 21:36:25 crc kubenswrapper[4636]: I1002 21:36:25.896630 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wdwwc"] Oct 02 21:36:34 crc kubenswrapper[4636]: I1002 21:36:34.754573 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-nr6nl" Oct 02 21:36:46 crc kubenswrapper[4636]: I1002 21:36:46.883974 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d"] Oct 02 21:36:46 crc kubenswrapper[4636]: I1002 21:36:46.886415 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:46 crc kubenswrapper[4636]: I1002 21:36:46.888425 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 02 21:36:46 crc kubenswrapper[4636]: I1002 21:36:46.896039 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d"] Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.012409 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.012486 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tv24\" (UniqueName: \"kubernetes.io/projected/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-kube-api-access-2tv24\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.012531 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.113722 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.113819 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tv24\" (UniqueName: \"kubernetes.io/projected/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-kube-api-access-2tv24\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.113847 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.114697 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.114715 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.130742 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tv24\" (UniqueName: \"kubernetes.io/projected/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-kube-api-access-2tv24\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.202276 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.584333 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d"] Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.969473 4636 generic.go:334] "Generic (PLEG): container finished" podID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerID="8d80beeb417c622481a1872318f78a9d6c814c2be42f51ea93df4642f7528f4f" exitCode=0 Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.969676 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" event={"ID":"96db42c7-fb6d-4f65-8618-005ed4e4f8fc","Type":"ContainerDied","Data":"8d80beeb417c622481a1872318f78a9d6c814c2be42f51ea93df4642f7528f4f"} Oct 02 21:36:47 crc kubenswrapper[4636]: I1002 21:36:47.969811 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" event={"ID":"96db42c7-fb6d-4f65-8618-005ed4e4f8fc","Type":"ContainerStarted","Data":"43a79384fe2bc7ac473808418a0dfd07566eef3493ab939325b8058b043da10a"} Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.258373 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2pm2l"] Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.261361 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.269939 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2pm2l"]
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.347895 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-catalog-content\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.347961 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-utilities\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.348089 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chw6m\" (UniqueName: \"kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.448897 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chw6m\" (UniqueName: \"kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.449125 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-catalog-content\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.449278 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-utilities\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.449665 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-utilities\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.449665 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-catalog-content\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.468230 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chw6m\" (UniqueName: \"kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l"
\"kube-api-access-chw6m\" (UniqueName: \"kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m\") pod \"redhat-operators-2pm2l\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") " pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:36:49 crc kubenswrapper[4636]: I1002 21:36:49.595025 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.014911 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2pm2l"] Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.947082 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-wdwwc" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerName="console" containerID="cri-o://b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3" gracePeriod=15 Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.990670 4636 generic.go:334] "Generic (PLEG): container finished" podID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerID="cfcfe3b7e01c566079a2fa12cf2b36750994e41ab10181e6fd40f6c82cb306c5" exitCode=0 Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.990768 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" event={"ID":"96db42c7-fb6d-4f65-8618-005ed4e4f8fc","Type":"ContainerDied","Data":"cfcfe3b7e01c566079a2fa12cf2b36750994e41ab10181e6fd40f6c82cb306c5"} Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.994915 4636 generic.go:334] "Generic (PLEG): container finished" podID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerID="3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f" exitCode=0 Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.994956 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2pm2l" event={"ID":"b3533076-a10f-4e6a-a113-d08a5dcf932c","Type":"ContainerDied","Data":"3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f"} Oct 02 21:36:50 crc kubenswrapper[4636]: I1002 21:36:50.994983 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2pm2l" event={"ID":"b3533076-a10f-4e6a-a113-d08a5dcf932c","Type":"ContainerStarted","Data":"9501964f1e1c4459cc6f32caecd35349bd253c1a2b78aa79d466599901883b86"} Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.379349 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wdwwc_b4ddd281-18ba-41ec-b5a6-788b9f5a942e/console/0.log" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.379675 4636 util.go:48] "No ready sandbox for pod can be found. 
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575272 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-serving-cert\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575328 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-oauth-serving-cert\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575346 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-oauth-config\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575370 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-trusted-ca-bundle\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575425 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-service-ca\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575455 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctr9b\" (UniqueName: \"kubernetes.io/projected/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-kube-api-access-ctr9b\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.575494 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-config\") pod \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\" (UID: \"b4ddd281-18ba-41ec-b5a6-788b9f5a942e\") "
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.576119 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.576127 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-config" (OuterVolumeSpecName: "console-config") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.576541 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-service-ca" (OuterVolumeSpecName: "service-ca") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.576698 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.580586 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.580972 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-kube-api-access-ctr9b" (OuterVolumeSpecName: "kube-api-access-ctr9b") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "kube-api-access-ctr9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.581176 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b4ddd281-18ba-41ec-b5a6-788b9f5a942e" (UID: "b4ddd281-18ba-41ec-b5a6-788b9f5a942e"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676732 4636 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676797 4636 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676810 4636 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676821 4636 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676863 4636 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676876 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctr9b\" (UniqueName: \"kubernetes.io/projected/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-kube-api-access-ctr9b\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:51 crc kubenswrapper[4636]: I1002 21:36:51.676889 4636 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b4ddd281-18ba-41ec-b5a6-788b9f5a942e-console-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.003383 4636 generic.go:334] "Generic (PLEG): container finished" podID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerID="a471fe5d72129ee15fa8be039ecfa79819e1b91657f7e8ea75e6207c45aca2f3" exitCode=0 Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.003469 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" event={"ID":"96db42c7-fb6d-4f65-8618-005ed4e4f8fc","Type":"ContainerDied","Data":"a471fe5d72129ee15fa8be039ecfa79819e1b91657f7e8ea75e6207c45aca2f3"} Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.006509 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wdwwc_b4ddd281-18ba-41ec-b5a6-788b9f5a942e/console/0.log" Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.006554 4636 generic.go:334] "Generic (PLEG): container finished" podID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerID="b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3" exitCode=2 Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.006584 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wdwwc" event={"ID":"b4ddd281-18ba-41ec-b5a6-788b9f5a942e","Type":"ContainerDied","Data":"b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3"} Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.006609 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wdwwc" 
event={"ID":"b4ddd281-18ba-41ec-b5a6-788b9f5a942e","Type":"ContainerDied","Data":"bd7aa2fc8fabef2d8a1d5abbe48e5c6fefd07e8d48cb4374034448e7a2aae36c"} Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.006631 4636 scope.go:117] "RemoveContainer" containerID="b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3" Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.006790 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wdwwc" Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.030996 4636 scope.go:117] "RemoveContainer" containerID="b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3" Oct 02 21:36:52 crc kubenswrapper[4636]: E1002 21:36:52.031523 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3\": container with ID starting with b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3 not found: ID does not exist" containerID="b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3" Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.031575 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3"} err="failed to get container status \"b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3\": rpc error: code = NotFound desc = could not find container \"b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3\": container with ID starting with b333ed1fdf8e27b060bf392d3f011e7a80f49eaf3f9f37d5b52442b7e55a83c3 not found: ID does not exist" Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.043632 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wdwwc"] Oct 02 21:36:52 crc kubenswrapper[4636]: I1002 21:36:52.048239 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-wdwwc"] Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.014513 4636 generic.go:334] "Generic (PLEG): container finished" podID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerID="65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb" exitCode=0 Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.014725 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2pm2l" event={"ID":"b3533076-a10f-4e6a-a113-d08a5dcf932c","Type":"ContainerDied","Data":"65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb"} Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.276167 4636 util.go:48] "No ready sandbox for pod can be found. 
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.297247 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-bundle\") pod \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") "
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.300165 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-bundle" (OuterVolumeSpecName: "bundle") pod "96db42c7-fb6d-4f65-8618-005ed4e4f8fc" (UID: "96db42c7-fb6d-4f65-8618-005ed4e4f8fc"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.398405 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-util\") pod \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") "
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.398499 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tv24\" (UniqueName: \"kubernetes.io/projected/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-kube-api-access-2tv24\") pod \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\" (UID: \"96db42c7-fb6d-4f65-8618-005ed4e4f8fc\") "
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.398703 4636 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-bundle\") on node \"crc\" DevicePath \"\""
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.403701 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-kube-api-access-2tv24" (OuterVolumeSpecName: "kube-api-access-2tv24") pod "96db42c7-fb6d-4f65-8618-005ed4e4f8fc" (UID: "96db42c7-fb6d-4f65-8618-005ed4e4f8fc"). InnerVolumeSpecName "kube-api-access-2tv24". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.467232 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-util" (OuterVolumeSpecName: "util") pod "96db42c7-fb6d-4f65-8618-005ed4e4f8fc" (UID: "96db42c7-fb6d-4f65-8618-005ed4e4f8fc"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.500023 4636 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-util\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.500064 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tv24\" (UniqueName: \"kubernetes.io/projected/96db42c7-fb6d-4f65-8618-005ed4e4f8fc-kube-api-access-2tv24\") on node \"crc\" DevicePath \"\"" Oct 02 21:36:53 crc kubenswrapper[4636]: I1002 21:36:53.613508 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" path="/var/lib/kubelet/pods/b4ddd281-18ba-41ec-b5a6-788b9f5a942e/volumes" Oct 02 21:36:54 crc kubenswrapper[4636]: I1002 21:36:54.027952 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" event={"ID":"96db42c7-fb6d-4f65-8618-005ed4e4f8fc","Type":"ContainerDied","Data":"43a79384fe2bc7ac473808418a0dfd07566eef3493ab939325b8058b043da10a"} Oct 02 21:36:54 crc kubenswrapper[4636]: I1002 21:36:54.027989 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43a79384fe2bc7ac473808418a0dfd07566eef3493ab939325b8058b043da10a" Oct 02 21:36:54 crc kubenswrapper[4636]: I1002 21:36:54.027997 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d" Oct 02 21:36:54 crc kubenswrapper[4636]: I1002 21:36:54.031116 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2pm2l" event={"ID":"b3533076-a10f-4e6a-a113-d08a5dcf932c","Type":"ContainerStarted","Data":"cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5"} Oct 02 21:36:54 crc kubenswrapper[4636]: I1002 21:36:54.315089 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2pm2l" podStartSLOduration=2.744716412 podStartE2EDuration="5.315067883s" podCreationTimestamp="2025-10-02 21:36:49 +0000 UTC" firstStartedPulling="2025-10-02 21:36:50.996432989 +0000 UTC m=+802.319441008" lastFinishedPulling="2025-10-02 21:36:53.56678446 +0000 UTC m=+804.889792479" observedRunningTime="2025-10-02 21:36:54.060231844 +0000 UTC m=+805.383239903" watchObservedRunningTime="2025-10-02 21:36:54.315067883 +0000 UTC m=+805.638075912" Oct 02 21:36:59 crc kubenswrapper[4636]: I1002 21:36:59.595767 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:36:59 crc kubenswrapper[4636]: I1002 21:36:59.596845 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:36:59 crc kubenswrapper[4636]: I1002 21:36:59.634091 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:37:00 crc kubenswrapper[4636]: I1002 21:37:00.106957 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:37:01 crc kubenswrapper[4636]: I1002 21:37:01.239448 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2pm2l"] Oct 02 21:37:02 crc 
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.589465 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2pm2l"
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.719265 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chw6m\" (UniqueName: \"kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m\") pod \"b3533076-a10f-4e6a-a113-d08a5dcf932c\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") "
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.719348 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-utilities\") pod \"b3533076-a10f-4e6a-a113-d08a5dcf932c\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") "
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.719383 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-catalog-content\") pod \"b3533076-a10f-4e6a-a113-d08a5dcf932c\" (UID: \"b3533076-a10f-4e6a-a113-d08a5dcf932c\") "
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.720247 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-utilities" (OuterVolumeSpecName: "utilities") pod "b3533076-a10f-4e6a-a113-d08a5dcf932c" (UID: "b3533076-a10f-4e6a-a113-d08a5dcf932c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.732415 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m" (OuterVolumeSpecName: "kube-api-access-chw6m") pod "b3533076-a10f-4e6a-a113-d08a5dcf932c" (UID: "b3533076-a10f-4e6a-a113-d08a5dcf932c"). InnerVolumeSpecName "kube-api-access-chw6m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.799103 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3533076-a10f-4e6a-a113-d08a5dcf932c" (UID: "b3533076-a10f-4e6a-a113-d08a5dcf932c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.821089 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chw6m\" (UniqueName: \"kubernetes.io/projected/b3533076-a10f-4e6a-a113-d08a5dcf932c-kube-api-access-chw6m\") on node \"crc\" DevicePath \"\"" Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.821124 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:37:02 crc kubenswrapper[4636]: I1002 21:37:02.821133 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3533076-a10f-4e6a-a113-d08a5dcf932c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.090277 4636 generic.go:334] "Generic (PLEG): container finished" podID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerID="cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5" exitCode=0 Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.090315 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2pm2l" event={"ID":"b3533076-a10f-4e6a-a113-d08a5dcf932c","Type":"ContainerDied","Data":"cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5"} Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.090317 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2pm2l" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.090347 4636 scope.go:117] "RemoveContainer" containerID="cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.090338 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2pm2l" event={"ID":"b3533076-a10f-4e6a-a113-d08a5dcf932c","Type":"ContainerDied","Data":"9501964f1e1c4459cc6f32caecd35349bd253c1a2b78aa79d466599901883b86"} Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.115139 4636 scope.go:117] "RemoveContainer" containerID="65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.132693 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2pm2l"] Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.139985 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2pm2l"] Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.148672 4636 scope.go:117] "RemoveContainer" containerID="3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.151797 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx"] Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152049 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="extract-content" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152069 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="extract-content" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152083 4636 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="pull" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152092 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="pull" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152106 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="util" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152115 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="util" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152130 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerName="console" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152139 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerName="console" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152151 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="extract" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152159 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="extract" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152169 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="extract-utilities" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152177 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="extract-utilities" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.152187 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="registry-server" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152192 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="registry-server" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152279 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="96db42c7-fb6d-4f65-8618-005ed4e4f8fc" containerName="extract" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152293 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4ddd281-18ba-41ec-b5a6-788b9f5a942e" containerName="console" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152310 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" containerName="registry-server" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.152699 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.154508 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.154839 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.154998 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2trd7"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.154880 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.157392 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.171569 4636 scope.go:117] "RemoveContainer" containerID="cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5"
Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.172687 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5\": container with ID starting with cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5 not found: ID does not exist" containerID="cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.172783 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5"} err="failed to get container status \"cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5\": rpc error: code = NotFound desc = could not find container \"cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5\": container with ID starting with cb35395b0f67ae2e80fad217ed53e41cc399b92424dd76b5804a2403961ed3b5 not found: ID does not exist"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.172807 4636 scope.go:117] "RemoveContainer" containerID="65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb"
Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.173276 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb\": container with ID starting with 65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb not found: ID does not exist" containerID="65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.173318 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb"} err="failed to get container status \"65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb\": rpc error: code = NotFound desc = could not find container \"65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb\": container with ID starting with 65f7f6e3a3d90eea2acd678dd83961cb0921db5aaed301635700aeb9f90f39fb not found: ID does not exist"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.173345 4636 scope.go:117] "RemoveContainer" containerID="3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f"
scope.go:117] "RemoveContainer" containerID="3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f" Oct 02 21:37:03 crc kubenswrapper[4636]: E1002 21:37:03.173633 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f\": container with ID starting with 3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f not found: ID does not exist" containerID="3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.173675 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f"} err="failed to get container status \"3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f\": rpc error: code = NotFound desc = could not find container \"3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f\": container with ID starting with 3b3157d5321eaa0da35f4effff9a5784385a072623d9043cffddcbb3e4456f0f not found: ID does not exist" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.188681 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx"] Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.330306 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ctmn\" (UniqueName: \"kubernetes.io/projected/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-kube-api-access-2ctmn\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.330380 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-webhook-cert\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.330479 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-apiservice-cert\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.431798 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-webhook-cert\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.431854 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-apiservice-cert\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " 
pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.431898 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ctmn\" (UniqueName: \"kubernetes.io/projected/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-kube-api-access-2ctmn\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.437654 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-apiservice-cert\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.446063 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-webhook-cert\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.472438 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ctmn\" (UniqueName: \"kubernetes.io/projected/d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad-kube-api-access-2ctmn\") pod \"metallb-operator-controller-manager-7d556f69bd-9fsdx\" (UID: \"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad\") " pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.534352 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"] Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.535287 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.539507 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-4cwdk"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.540206 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.540331 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.546777 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"]
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.610187 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3533076-a10f-4e6a-a113-d08a5dcf932c" path="/var/lib/kubelet/pods/b3533076-a10f-4e6a-a113-d08a5dcf932c/volumes"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.735909 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4868bbba-0967-4af7-820a-ea4b90481964-webhook-cert\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.735978 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4868bbba-0967-4af7-820a-ea4b90481964-apiservice-cert\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.736037 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7wd2\" (UniqueName: \"kubernetes.io/projected/4868bbba-0967-4af7-820a-ea4b90481964-kube-api-access-g7wd2\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.768879 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.837026 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4868bbba-0967-4af7-820a-ea4b90481964-apiservice-cert\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.837141 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7wd2\" (UniqueName: \"kubernetes.io/projected/4868bbba-0967-4af7-820a-ea4b90481964-kube-api-access-g7wd2\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.837169 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4868bbba-0967-4af7-820a-ea4b90481964-webhook-cert\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.841373 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4868bbba-0967-4af7-820a-ea4b90481964-apiservice-cert\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.842392 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4868bbba-0967-4af7-820a-ea4b90481964-webhook-cert\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:03 crc kubenswrapper[4636]: I1002 21:37:03.857957 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7wd2\" (UniqueName: \"kubernetes.io/projected/4868bbba-0967-4af7-820a-ea4b90481964-kube-api-access-g7wd2\") pod \"metallb-operator-webhook-server-64bc94fc5b-pf5cc\" (UID: \"4868bbba-0967-4af7-820a-ea4b90481964\") " pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:04 crc kubenswrapper[4636]: I1002 21:37:04.148455 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:04 crc kubenswrapper[4636]: I1002 21:37:04.221704 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx"]
Oct 02 21:37:04 crc kubenswrapper[4636]: W1002 21:37:04.232943 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd29773fe_6fbb_4f8c_8bdf_64fedf0df1ad.slice/crio-943e1274f806810a43266217f2808c6f462e5c9e591349b7de5c3d1086f6e9ac WatchSource:0}: Error finding container 943e1274f806810a43266217f2808c6f462e5c9e591349b7de5c3d1086f6e9ac: Status 404 returned error can't find the container with id 943e1274f806810a43266217f2808c6f462e5c9e591349b7de5c3d1086f6e9ac
Oct 02 21:37:04 crc kubenswrapper[4636]: I1002 21:37:04.574956 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"]
Oct 02 21:37:05 crc kubenswrapper[4636]: I1002 21:37:05.102081 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc" event={"ID":"4868bbba-0967-4af7-820a-ea4b90481964","Type":"ContainerStarted","Data":"78c5ca98b879cfd6c99ec1a037985dfb01ec61351cb19b16bb0603e59799826e"}
Oct 02 21:37:05 crc kubenswrapper[4636]: I1002 21:37:05.103279 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" event={"ID":"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad","Type":"ContainerStarted","Data":"943e1274f806810a43266217f2808c6f462e5c9e591349b7de5c3d1086f6e9ac"}
Oct 02 21:37:10 crc kubenswrapper[4636]: I1002 21:37:10.135447 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc" event={"ID":"4868bbba-0967-4af7-820a-ea4b90481964","Type":"ContainerStarted","Data":"d755662a8ffacd389c8ad4e0331f1ec1cb9a7040826dbb505884f5728c132ee0"}
Oct 02 21:37:10 crc kubenswrapper[4636]: I1002 21:37:10.137063 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" event={"ID":"d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad","Type":"ContainerStarted","Data":"fcf71a8f39b18fb325822f554d954e55f98ca7f43b3823f230a08b2683b662ff"}
Oct 02 21:37:10 crc kubenswrapper[4636]: I1002 21:37:10.137159 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:10 crc kubenswrapper[4636]: I1002 21:37:10.137253 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx"
Oct 02 21:37:10 crc kubenswrapper[4636]: I1002 21:37:10.156492 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc" podStartSLOduration=2.075340278 podStartE2EDuration="7.156471368s" podCreationTimestamp="2025-10-02 21:37:03 +0000 UTC" firstStartedPulling="2025-10-02 21:37:04.592354949 +0000 UTC m=+815.915362968" lastFinishedPulling="2025-10-02 21:37:09.673486049 +0000 UTC m=+820.996494058" observedRunningTime="2025-10-02 21:37:10.153860367 +0000 UTC m=+821.476868396" watchObservedRunningTime="2025-10-02 21:37:10.156471368 +0000 UTC m=+821.479479387"
Oct 02 21:37:10 crc kubenswrapper[4636]: I1002 21:37:10.179903 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" podStartSLOduration=1.761054489 podStartE2EDuration="7.179881716s" podCreationTimestamp="2025-10-02 21:37:03 +0000 UTC" firstStartedPulling="2025-10-02 21:37:04.239365645 +0000 UTC m=+815.562373654" lastFinishedPulling="2025-10-02 21:37:09.658192862 +0000 UTC m=+820.981200881" observedRunningTime="2025-10-02 21:37:10.175043904 +0000 UTC m=+821.498051943" watchObservedRunningTime="2025-10-02 21:37:10.179881716 +0000 UTC m=+821.502889765"
pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" podStartSLOduration=1.761054489 podStartE2EDuration="7.179881716s" podCreationTimestamp="2025-10-02 21:37:03 +0000 UTC" firstStartedPulling="2025-10-02 21:37:04.239365645 +0000 UTC m=+815.562373654" lastFinishedPulling="2025-10-02 21:37:09.658192862 +0000 UTC m=+820.981200881" observedRunningTime="2025-10-02 21:37:10.175043904 +0000 UTC m=+821.498051943" watchObservedRunningTime="2025-10-02 21:37:10.179881716 +0000 UTC m=+821.502889765" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.191985 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m57c6"] Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.193331 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.198076 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m57c6"] Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.299139 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndm7n\" (UniqueName: \"kubernetes.io/projected/5d38014e-6074-4398-814f-c0024a97695c-kube-api-access-ndm7n\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.299852 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-utilities\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.299970 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-catalog-content\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.400875 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndm7n\" (UniqueName: \"kubernetes.io/projected/5d38014e-6074-4398-814f-c0024a97695c-kube-api-access-ndm7n\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.400939 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-utilities\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.400962 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-catalog-content\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:17 crc kubenswrapper[4636]: 
Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.401450 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-catalog-content\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.401686 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-utilities\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.421659 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndm7n\" (UniqueName: \"kubernetes.io/projected/5d38014e-6074-4398-814f-c0024a97695c-kube-api-access-ndm7n\") pod \"community-operators-m57c6\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:17 crc kubenswrapper[4636]: I1002 21:37:17.521576 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:18 crc kubenswrapper[4636]: I1002 21:37:18.052887 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m57c6"]
Oct 02 21:37:18 crc kubenswrapper[4636]: I1002 21:37:18.178979 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerStarted","Data":"28609703142a5608142932c065e57fe4a7ef340e2e7dd11e7ad0b716079d67ea"}
Oct 02 21:37:19 crc kubenswrapper[4636]: I1002 21:37:19.185939 4636 generic.go:334] "Generic (PLEG): container finished" podID="5d38014e-6074-4398-814f-c0024a97695c" containerID="4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b" exitCode=0
Oct 02 21:37:19 crc kubenswrapper[4636]: I1002 21:37:19.186070 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerDied","Data":"4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b"}
Oct 02 21:37:21 crc kubenswrapper[4636]: I1002 21:37:21.200265 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerStarted","Data":"49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a"}
Oct 02 21:37:22 crc kubenswrapper[4636]: I1002 21:37:22.207449 4636 generic.go:334] "Generic (PLEG): container finished" podID="5d38014e-6074-4398-814f-c0024a97695c" containerID="49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a" exitCode=0
Oct 02 21:37:22 crc kubenswrapper[4636]: I1002 21:37:22.207506 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerDied","Data":"49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a"}
Oct 02 21:37:24 crc kubenswrapper[4636]: I1002 21:37:24.155364 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-64bc94fc5b-pf5cc"
Oct 02 21:37:24 crc kubenswrapper[4636]: I1002 21:37:24.225294 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerStarted","Data":"1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803"}
Oct 02 21:37:24 crc kubenswrapper[4636]: I1002 21:37:24.251385 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m57c6" podStartSLOduration=3.118434897 podStartE2EDuration="7.251362811s" podCreationTimestamp="2025-10-02 21:37:17 +0000 UTC" firstStartedPulling="2025-10-02 21:37:19.188521863 +0000 UTC m=+830.511529892" lastFinishedPulling="2025-10-02 21:37:23.321449787 +0000 UTC m=+834.644457806" observedRunningTime="2025-10-02 21:37:24.246986082 +0000 UTC m=+835.569994111" watchObservedRunningTime="2025-10-02 21:37:24.251362811 +0000 UTC m=+835.574370830"
Oct 02 21:37:27 crc kubenswrapper[4636]: I1002 21:37:27.522768 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:27 crc kubenswrapper[4636]: I1002 21:37:27.523031 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:27 crc kubenswrapper[4636]: I1002 21:37:27.584104 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:28 crc kubenswrapper[4636]: I1002 21:37:28.282874 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m57c6"
Oct 02 21:37:29 crc kubenswrapper[4636]: I1002 21:37:29.173007 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m57c6"]
Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.253857 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m57c6" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="registry-server" containerID="cri-o://1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803" gracePeriod=2
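[Annotation] The "Killing container with a grace period" entry above (gracePeriod=2) is the API DELETE being carried out: the kubelet asks the runtime to stop the container and only force-kills it if it has not exited when the grace period lapses. A schematic of that contract, assuming hypothetical term/kill callbacks rather than the real CRI client:

    package sketch

    import (
    	"context"
    	"time"
    )

    // stopWithGrace sketches the stop-then-kill contract behind
    // "Killing container with a grace period". term and kill stand in for
    // runtime calls; exited is closed when the container has exited.
    func stopWithGrace(ctx context.Context, grace time.Duration,
    	term, kill func() error, exited <-chan struct{}) error {
    	if err := term(); err != nil {
    		return err
    	}
    	select {
    	case <-exited:
    		return nil // clean shutdown within the grace period
    	case <-time.After(grace):
    		return kill() // grace period (here 2s) elapsed; force-kill
    	case <-ctx.Done():
    		return ctx.Err()
    	}
    }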
Need to start a new one" pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.777805 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndm7n\" (UniqueName: \"kubernetes.io/projected/5d38014e-6074-4398-814f-c0024a97695c-kube-api-access-ndm7n\") pod \"5d38014e-6074-4398-814f-c0024a97695c\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.777908 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-utilities\") pod \"5d38014e-6074-4398-814f-c0024a97695c\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.777933 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-catalog-content\") pod \"5d38014e-6074-4398-814f-c0024a97695c\" (UID: \"5d38014e-6074-4398-814f-c0024a97695c\") " Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.778922 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-utilities" (OuterVolumeSpecName: "utilities") pod "5d38014e-6074-4398-814f-c0024a97695c" (UID: "5d38014e-6074-4398-814f-c0024a97695c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.788914 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d38014e-6074-4398-814f-c0024a97695c-kube-api-access-ndm7n" (OuterVolumeSpecName: "kube-api-access-ndm7n") pod "5d38014e-6074-4398-814f-c0024a97695c" (UID: "5d38014e-6074-4398-814f-c0024a97695c"). InnerVolumeSpecName "kube-api-access-ndm7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.827246 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d38014e-6074-4398-814f-c0024a97695c" (UID: "5d38014e-6074-4398-814f-c0024a97695c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.880069 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndm7n\" (UniqueName: \"kubernetes.io/projected/5d38014e-6074-4398-814f-c0024a97695c-kube-api-access-ndm7n\") on node \"crc\" DevicePath \"\"" Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.880126 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:37:30 crc kubenswrapper[4636]: I1002 21:37:30.880146 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d38014e-6074-4398-814f-c0024a97695c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.260357 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m57c6" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.261107 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerDied","Data":"1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803"} Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.261174 4636 scope.go:117] "RemoveContainer" containerID="1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.261470 4636 generic.go:334] "Generic (PLEG): container finished" podID="5d38014e-6074-4398-814f-c0024a97695c" containerID="1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803" exitCode=0 Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.261504 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m57c6" event={"ID":"5d38014e-6074-4398-814f-c0024a97695c","Type":"ContainerDied","Data":"28609703142a5608142932c065e57fe4a7ef340e2e7dd11e7ad0b716079d67ea"} Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.278124 4636 scope.go:117] "RemoveContainer" containerID="49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.286641 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m57c6"] Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.290009 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m57c6"] Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.312856 4636 scope.go:117] "RemoveContainer" containerID="4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.333210 4636 scope.go:117] "RemoveContainer" containerID="1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803" Oct 02 21:37:31 crc kubenswrapper[4636]: E1002 21:37:31.334136 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803\": container with ID starting with 1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803 not found: ID does not exist" containerID="1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.334235 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803"} err="failed to get container status \"1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803\": rpc error: code = NotFound desc = could not find container \"1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803\": container with ID starting with 1e47d007392a3cb357c84f1cf67a8edb407038dd0d078f8a97a800a6a11b0803 not found: ID does not exist" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.334312 4636 scope.go:117] "RemoveContainer" containerID="49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a" Oct 02 21:37:31 crc kubenswrapper[4636]: E1002 21:37:31.334871 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a\": container with ID 
starting with 49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a not found: ID does not exist" containerID="49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.334901 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a"} err="failed to get container status \"49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a\": rpc error: code = NotFound desc = could not find container \"49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a\": container with ID starting with 49ca947d7ae960f8a26a7cfeb3219eeb17615a47916be364773483c3ada7604a not found: ID does not exist" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.334924 4636 scope.go:117] "RemoveContainer" containerID="4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b" Oct 02 21:37:31 crc kubenswrapper[4636]: E1002 21:37:31.335195 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b\": container with ID starting with 4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b not found: ID does not exist" containerID="4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.335316 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b"} err="failed to get container status \"4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b\": rpc error: code = NotFound desc = could not find container \"4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b\": container with ID starting with 4cd21f4b57624c3b7b50d50cf892b287c4fc145c7c2f2d19b81194b803d8be8b not found: ID does not exist" Oct 02 21:37:31 crc kubenswrapper[4636]: I1002 21:37:31.614938 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d38014e-6074-4398-814f-c0024a97695c" path="/var/lib/kubelet/pods/5d38014e-6074-4398-814f-c0024a97695c/volumes" Oct 02 21:37:43 crc kubenswrapper[4636]: I1002 21:37:43.771463 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7d556f69bd-9fsdx" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.715737 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2ngwv"] Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.716235 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="extract-content" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.716246 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="extract-content" Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.716263 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="extract-utilities" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.716269 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="extract-utilities" Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.716279 4636 cpu_manager.go:410] "RemoveStaleState: removing container" 
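[Annotation] The RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" triplets above are benign: by the time the deletor re-queried CRI-O, the container IDs were already gone, and NotFound during cleanup simply means the work is already done. Deletion stays idempotent when gRPC NotFound is treated as success; a sketch, with removeContainer standing in for the actual runtime call:

    package sketch

    import (
    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // deleteIdempotent treats "ID does not exist" as success, so repeated
    // cleanup of the same container ID cannot fail spuriously.
    func deleteIdempotent(removeContainer func(id string) error, id string) error {
    	err := removeContainer(id)
    	if status.Code(err) == codes.NotFound {
    		return nil // already removed, nothing left to do
    	}
    	return err // nil on success, real errors otherwise
    }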
podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="registry-server" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.716285 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="registry-server" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.716383 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d38014e-6074-4398-814f-c0024a97695c" containerName="registry-server" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.718242 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.721511 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-rc55x" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.726597 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb"] Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.727227 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.729390 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.733516 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.747963 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.751503 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb"] Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.789846 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpjtl\" (UniqueName: \"kubernetes.io/projected/ba780778-47a6-4a40-a3e4-72215f4e09fc-kube-api-access-kpjtl\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.789900 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba780778-47a6-4a40-a3e4-72215f4e09fc-cert\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.789983 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-reloader\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.790032 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.790082 
4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-conf\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.790104 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-startup\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.790127 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vncs7\" (UniqueName: \"kubernetes.io/projected/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-kube-api-access-vncs7\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.790392 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-sockets\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.790500 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics-certs\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.858208 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-cjfrp"] Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.858995 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.861900 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.861934 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.861905 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-94kcv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.866285 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.889470 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-mlb7r"] Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891559 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vncs7\" (UniqueName: \"kubernetes.io/projected/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-kube-api-access-vncs7\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891608 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-metrics-certs\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891639 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891667 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-sockets\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891687 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics-certs\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891707 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpjtl\" (UniqueName: \"kubernetes.io/projected/ba780778-47a6-4a40-a3e4-72215f4e09fc-kube-api-access-kpjtl\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891725 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba780778-47a6-4a40-a3e4-72215f4e09fc-cert\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891741 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twhw8\" (UniqueName: \"kubernetes.io/projected/70229f69-0581-4a0f-9e52-a3bf04d33ddc-kube-api-access-twhw8\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891778 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-reloader\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891802 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891822 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/70229f69-0581-4a0f-9e52-a3bf04d33ddc-metallb-excludel2\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891848 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-conf\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.891861 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-startup\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.892796 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-startup\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.893239 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-sockets\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.893303 4636 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.893337 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics-certs podName:5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15 nodeName:}" failed. No retries permitted until 2025-10-02 21:37:45.393325358 +0000 UTC m=+856.716333377 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics-certs") pod "frr-k8s-2ngwv" (UID: "5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15") : secret "frr-k8s-certs-secret" not found Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.893479 4636 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.893507 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba780778-47a6-4a40-a3e4-72215f4e09fc-cert podName:ba780778-47a6-4a40-a3e4-72215f4e09fc nodeName:}" failed. No retries permitted until 2025-10-02 21:37:45.393500563 +0000 UTC m=+856.716508582 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ba780778-47a6-4a40-a3e4-72215f4e09fc-cert") pod "frr-k8s-webhook-server-64bf5d555-rrdgb" (UID: "ba780778-47a6-4a40-a3e4-72215f4e09fc") : secret "frr-k8s-webhook-server-cert" not found Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.893723 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-reloader\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.893911 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.894079 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-frr-conf\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.895265 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.898061 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.928787 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-mlb7r"] Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.949849 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vncs7\" (UniqueName: \"kubernetes.io/projected/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-kube-api-access-vncs7\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.955230 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpjtl\" (UniqueName: \"kubernetes.io/projected/ba780778-47a6-4a40-a3e4-72215f4e09fc-kube-api-access-kpjtl\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.993141 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/70229f69-0581-4a0f-9e52-a3bf04d33ddc-metallb-excludel2\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.993445 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-799jj\" (UniqueName: \"kubernetes.io/projected/21241409-6577-4338-9478-d467bb5035fd-kube-api-access-799jj\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.993471 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-metrics-certs\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.993500 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.993529 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/21241409-6577-4338-9478-d467bb5035fd-cert\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.993559 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21241409-6577-4338-9478-d467bb5035fd-metrics-certs\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 
21:37:44.993581 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twhw8\" (UniqueName: \"kubernetes.io/projected/70229f69-0581-4a0f-9e52-a3bf04d33ddc-kube-api-access-twhw8\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.993891 4636 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 02 21:37:44 crc kubenswrapper[4636]: E1002 21:37:44.993973 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist podName:70229f69-0581-4a0f-9e52-a3bf04d33ddc nodeName:}" failed. No retries permitted until 2025-10-02 21:37:45.493954706 +0000 UTC m=+856.816962725 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist") pod "speaker-cjfrp" (UID: "70229f69-0581-4a0f-9e52-a3bf04d33ddc") : secret "metallb-memberlist" not found Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.994165 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/70229f69-0581-4a0f-9e52-a3bf04d33ddc-metallb-excludel2\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:44 crc kubenswrapper[4636]: I1002 21:37:44.998667 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-metrics-certs\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.012346 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twhw8\" (UniqueName: \"kubernetes.io/projected/70229f69-0581-4a0f-9e52-a3bf04d33ddc-kube-api-access-twhw8\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.095132 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/21241409-6577-4338-9478-d467bb5035fd-cert\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.095183 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21241409-6577-4338-9478-d467bb5035fd-metrics-certs\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.095240 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-799jj\" (UniqueName: \"kubernetes.io/projected/21241409-6577-4338-9478-d467bb5035fd-kube-api-access-799jj\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.098129 4636 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 02 
21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.098358 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21241409-6577-4338-9478-d467bb5035fd-metrics-certs\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.108043 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/21241409-6577-4338-9478-d467bb5035fd-cert\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.112171 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-799jj\" (UniqueName: \"kubernetes.io/projected/21241409-6577-4338-9478-d467bb5035fd-kube-api-access-799jj\") pod \"controller-68d546b9d8-mlb7r\" (UID: \"21241409-6577-4338-9478-d467bb5035fd\") " pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.231527 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-mlb7r" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.403470 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics-certs\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.403851 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba780778-47a6-4a40-a3e4-72215f4e09fc-cert\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.409248 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15-metrics-certs\") pod \"frr-k8s-2ngwv\" (UID: \"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15\") " pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.409379 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba780778-47a6-4a40-a3e4-72215f4e09fc-cert\") pod \"frr-k8s-webhook-server-64bf5d555-rrdgb\" (UID: \"ba780778-47a6-4a40-a3e4-72215f4e09fc\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.449874 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-mlb7r"] Oct 02 21:37:45 crc kubenswrapper[4636]: W1002 21:37:45.457949 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21241409_6577_4338_9478_d467bb5035fd.slice/crio-27457a141c90addad4e32bccdba10a4fac84be115c28214577c9defac9cf8308 WatchSource:0}: Error finding container 27457a141c90addad4e32bccdba10a4fac84be115c28214577c9defac9cf8308: Status 404 returned error can't find the container with id 27457a141c90addad4e32bccdba10a4fac84be115c28214577c9defac9cf8308 Oct 02 21:37:45 crc kubenswrapper[4636]: 
I1002 21:37:45.504600 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp" Oct 02 21:37:45 crc kubenswrapper[4636]: E1002 21:37:45.504761 4636 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 02 21:37:45 crc kubenswrapper[4636]: E1002 21:37:45.504829 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist podName:70229f69-0581-4a0f-9e52-a3bf04d33ddc nodeName:}" failed. No retries permitted until 2025-10-02 21:37:46.504813964 +0000 UTC m=+857.827821983 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist") pod "speaker-cjfrp" (UID: "70229f69-0581-4a0f-9e52-a3bf04d33ddc") : secret "metallb-memberlist" not found Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.636028 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.645593 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:45 crc kubenswrapper[4636]: I1002 21:37:45.869282 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb"] Oct 02 21:37:45 crc kubenswrapper[4636]: W1002 21:37:45.896499 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba780778_47a6_4a40_a3e4_72215f4e09fc.slice/crio-1003382571d4779aebe4aacfc48851aab35b9649190e9bfe1248afdf3c67c17c WatchSource:0}: Error finding container 1003382571d4779aebe4aacfc48851aab35b9649190e9bfe1248afdf3c67c17c: Status 404 returned error can't find the container with id 1003382571d4779aebe4aacfc48851aab35b9649190e9bfe1248afdf3c67c17c Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.347981 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" event={"ID":"ba780778-47a6-4a40-a3e4-72215f4e09fc","Type":"ContainerStarted","Data":"1003382571d4779aebe4aacfc48851aab35b9649190e9bfe1248afdf3c67c17c"} Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.349052 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"496d2685274f173feed342669ed336e116c08adb583a62329e65900bc10c39ef"} Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.350542 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-mlb7r" event={"ID":"21241409-6577-4338-9478-d467bb5035fd","Type":"ContainerStarted","Data":"35567d7cd23b19fe8826c4af5b22540498e5128c8f7e947d11f363b7c303a07a"} Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.350565 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-mlb7r" event={"ID":"21241409-6577-4338-9478-d467bb5035fd","Type":"ContainerStarted","Data":"08eaf988f10e658a80ccee149038ba761ff067e26c19d494ab40d0f978b9f437"} Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.350577 4636 
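[Annotation] The memberlist mount fails twice above while the metallb-memberlist secret does not yet exist, and the logged retry delay doubles from 500ms (21:37:44.993973) to 1s (21:37:45.504829); the mount then succeeds just below, once the secret appears. That is plain exponential backoff; a sketch (the initial value is taken from the log, the cap is an assumption):

    package sketch

    import "time"

    // nextDelay doubles the wait after each consecutive failure, matching the
    // 500ms -> 1s progression logged for the memberlist volume.
    func nextDelay(prev time.Duration) time.Duration {
    	const (
    		initial  = 500 * time.Millisecond
    		maxDelay = 2 * time.Minute // assumed cap, not taken from this log
    	)
    	if prev <= 0 {
    		return initial
    	}
    	if next := 2 * prev; next < maxDelay {
    		return next
    	}
    	return maxDelay
    }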
Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.350577 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-mlb7r" event={"ID":"21241409-6577-4338-9478-d467bb5035fd","Type":"ContainerStarted","Data":"27457a141c90addad4e32bccdba10a4fac84be115c28214577c9defac9cf8308"}
Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.350695 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-mlb7r"
Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.371117 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-mlb7r" podStartSLOduration=2.371094443 podStartE2EDuration="2.371094443s" podCreationTimestamp="2025-10-02 21:37:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:37:46.368849813 +0000 UTC m=+857.691857842" watchObservedRunningTime="2025-10-02 21:37:46.371094443 +0000 UTC m=+857.694102462"
Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.519201 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp"
Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.525103 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/70229f69-0581-4a0f-9e52-a3bf04d33ddc-memberlist\") pod \"speaker-cjfrp\" (UID: \"70229f69-0581-4a0f-9e52-a3bf04d33ddc\") " pod="metallb-system/speaker-cjfrp"
Oct 02 21:37:46 crc kubenswrapper[4636]: I1002 21:37:46.705858 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-cjfrp"
Oct 02 21:37:46 crc kubenswrapper[4636]: W1002 21:37:46.732579 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70229f69_0581_4a0f_9e52_a3bf04d33ddc.slice/crio-e6774fd5903d9057015f0d6e89d272f1a5580ba45747411e0b500f3812bef846 WatchSource:0}: Error finding container e6774fd5903d9057015f0d6e89d272f1a5580ba45747411e0b500f3812bef846: Status 404 returned error can't find the container with id e6774fd5903d9057015f0d6e89d272f1a5580ba45747411e0b500f3812bef846
Oct 02 21:37:47 crc kubenswrapper[4636]: I1002 21:37:47.358637 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cjfrp" event={"ID":"70229f69-0581-4a0f-9e52-a3bf04d33ddc","Type":"ContainerStarted","Data":"a77fa57fc9a3d8e48aa762eb1ebc6e2ecc994e6f425cc862c037a33fd7936c45"}
Oct 02 21:37:47 crc kubenswrapper[4636]: I1002 21:37:47.358680 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cjfrp" event={"ID":"70229f69-0581-4a0f-9e52-a3bf04d33ddc","Type":"ContainerStarted","Data":"8dcc799d9cb3a28b3b59a4d4a6d0a0f5afc12be77105d3aa67e24a8b6b82a31b"}
Oct 02 21:37:47 crc kubenswrapper[4636]: I1002 21:37:47.358693 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cjfrp" event={"ID":"70229f69-0581-4a0f-9e52-a3bf04d33ddc","Type":"ContainerStarted","Data":"e6774fd5903d9057015f0d6e89d272f1a5580ba45747411e0b500f3812bef846"}
Oct 02 21:37:47 crc kubenswrapper[4636]: I1002 21:37:47.359389 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-cjfrp"
Oct 02 21:37:47 crc kubenswrapper[4636]: I1002 21:37:47.383627 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-cjfrp" podStartSLOduration=3.3836104430000002 podStartE2EDuration="3.383610443s" podCreationTimestamp="2025-10-02 21:37:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:37:47.379777409 +0000 UTC m=+858.702785458" watchObservedRunningTime="2025-10-02 21:37:47.383610443 +0000 UTC m=+858.706618462"
Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.239270 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-mlb7r"
Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.271794 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7wcwv"]
Need to start a new one" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.292520 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7wcwv"] Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.437690 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-catalog-content\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.438230 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-utilities\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.438319 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-527c9\" (UniqueName: \"kubernetes.io/projected/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-kube-api-access-527c9\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.539275 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-utilities\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.539343 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-527c9\" (UniqueName: \"kubernetes.io/projected/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-kube-api-access-527c9\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.539412 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-catalog-content\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.539853 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-utilities\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.539880 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-catalog-content\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.558437 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-527c9\" (UniqueName: \"kubernetes.io/projected/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-kube-api-access-527c9\") pod \"certified-operators-7wcwv\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:55 crc kubenswrapper[4636]: I1002 21:37:55.694210 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.423730 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" event={"ID":"ba780778-47a6-4a40-a3e4-72215f4e09fc","Type":"ContainerStarted","Data":"78e3d35ba507727c4fb0d8c67c24db3100fea1cfdeda6a7c916d0427616b4d19"} Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.424035 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.426336 4636 generic.go:334] "Generic (PLEG): container finished" podID="5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15" containerID="2251aa932ddcfeab66b92369f90f84c8aeb56c1d6f9714a5955db910299b4c35" exitCode=0 Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.426366 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerDied","Data":"2251aa932ddcfeab66b92369f90f84c8aeb56c1d6f9714a5955db910299b4c35"} Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.436166 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" podStartSLOduration=2.174971536 podStartE2EDuration="12.436147091s" podCreationTimestamp="2025-10-02 21:37:44 +0000 UTC" firstStartedPulling="2025-10-02 21:37:45.898468247 +0000 UTC m=+857.221476266" lastFinishedPulling="2025-10-02 21:37:56.159643802 +0000 UTC m=+867.482651821" observedRunningTime="2025-10-02 21:37:56.435261127 +0000 UTC m=+867.758269146" watchObservedRunningTime="2025-10-02 21:37:56.436147091 +0000 UTC m=+867.759155110" Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.567218 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7wcwv"] Oct 02 21:37:56 crc kubenswrapper[4636]: W1002 21:37:56.569514 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41c07b36_34a8_4866_bbc9_7ccbb5cbf4ea.slice/crio-7301ef59cac34baa233cc47db6b96005efbe8d6db6154dc9990b7991e12e4e5d WatchSource:0}: Error finding container 7301ef59cac34baa233cc47db6b96005efbe8d6db6154dc9990b7991e12e4e5d: Status 404 returned error can't find the container with id 7301ef59cac34baa233cc47db6b96005efbe8d6db6154dc9990b7991e12e4e5d Oct 02 21:37:56 crc kubenswrapper[4636]: I1002 21:37:56.712450 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-cjfrp" Oct 02 21:37:57 crc kubenswrapper[4636]: I1002 21:37:57.434876 4636 generic.go:334] "Generic (PLEG): container finished" podID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerID="b9ca47808466f5ce64f97d26cc2a27955525afa4aadd0264bb27ee192ba76fb1" exitCode=0 Oct 02 21:37:57 crc kubenswrapper[4636]: I1002 21:37:57.435009 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wcwv" 
event={"ID":"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea","Type":"ContainerDied","Data":"b9ca47808466f5ce64f97d26cc2a27955525afa4aadd0264bb27ee192ba76fb1"} Oct 02 21:37:57 crc kubenswrapper[4636]: I1002 21:37:57.435059 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wcwv" event={"ID":"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea","Type":"ContainerStarted","Data":"7301ef59cac34baa233cc47db6b96005efbe8d6db6154dc9990b7991e12e4e5d"} Oct 02 21:37:57 crc kubenswrapper[4636]: I1002 21:37:57.437513 4636 generic.go:334] "Generic (PLEG): container finished" podID="5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15" containerID="59f3e296094c790ae1701aa4eb18753c75311593604f65f014cfab29c8e91d66" exitCode=0 Oct 02 21:37:57 crc kubenswrapper[4636]: I1002 21:37:57.437632 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerDied","Data":"59f3e296094c790ae1701aa4eb18753c75311593604f65f014cfab29c8e91d66"} Oct 02 21:37:58 crc kubenswrapper[4636]: I1002 21:37:58.444395 4636 generic.go:334] "Generic (PLEG): container finished" podID="5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15" containerID="14efeaf1b24d474478e8a181871f4168d19869e29af055ba8e8a3d32c07d7059" exitCode=0 Oct 02 21:37:58 crc kubenswrapper[4636]: I1002 21:37:58.444730 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerDied","Data":"14efeaf1b24d474478e8a181871f4168d19869e29af055ba8e8a3d32c07d7059"} Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.459546 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"00da9d83472f9ff6a7b275bc27e3be8823f28ac6bc2a08c93c9632fc9dbb6c2d"} Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.474026 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2gnb9"] Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.475062 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.478680 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.480904 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-tcg8x" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.480912 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.493647 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2gnb9"] Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.609584 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndhgj\" (UniqueName: \"kubernetes.io/projected/b8489c3b-7f53-413f-a04e-2909054e5d28-kube-api-access-ndhgj\") pod \"openstack-operator-index-2gnb9\" (UID: \"b8489c3b-7f53-413f-a04e-2909054e5d28\") " pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.710910 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndhgj\" (UniqueName: \"kubernetes.io/projected/b8489c3b-7f53-413f-a04e-2909054e5d28-kube-api-access-ndhgj\") pod \"openstack-operator-index-2gnb9\" (UID: \"b8489c3b-7f53-413f-a04e-2909054e5d28\") " pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.740128 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndhgj\" (UniqueName: \"kubernetes.io/projected/b8489c3b-7f53-413f-a04e-2909054e5d28-kube-api-access-ndhgj\") pod \"openstack-operator-index-2gnb9\" (UID: \"b8489c3b-7f53-413f-a04e-2909054e5d28\") " pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:00 crc kubenswrapper[4636]: I1002 21:38:00.796937 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:01 crc kubenswrapper[4636]: I1002 21:38:01.220289 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2gnb9"] Oct 02 21:38:01 crc kubenswrapper[4636]: W1002 21:38:01.231809 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8489c3b_7f53_413f_a04e_2909054e5d28.slice/crio-82d69832a81feb1813394e8491c8698bd50abfcb99768fd7a05779caedf27b54 WatchSource:0}: Error finding container 82d69832a81feb1813394e8491c8698bd50abfcb99768fd7a05779caedf27b54: Status 404 returned error can't find the container with id 82d69832a81feb1813394e8491c8698bd50abfcb99768fd7a05779caedf27b54 Oct 02 21:38:01 crc kubenswrapper[4636]: I1002 21:38:01.468368 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2gnb9" event={"ID":"b8489c3b-7f53-413f-a04e-2909054e5d28","Type":"ContainerStarted","Data":"82d69832a81feb1813394e8491c8698bd50abfcb99768fd7a05779caedf27b54"} Oct 02 21:38:01 crc kubenswrapper[4636]: I1002 21:38:01.470676 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"6794aa40164e2cb8ce2ff7259c072c51384fa621ca0c9402fcf18379cbabdf08"} Oct 02 21:38:01 crc kubenswrapper[4636]: I1002 21:38:01.470737 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"45ae95fa0a818ee7227508b4a6fe4ce18b0b8a20f7ef85afa2c9ceb807d3a127"} Oct 02 21:38:01 crc kubenswrapper[4636]: I1002 21:38:01.472254 4636 generic.go:334] "Generic (PLEG): container finished" podID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerID="3c1dfd8c31fce877a819381cd42d7b1c20af85aeea1b31822798ba0a0a07abbf" exitCode=0 Oct 02 21:38:01 crc kubenswrapper[4636]: I1002 21:38:01.472275 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wcwv" event={"ID":"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea","Type":"ContainerDied","Data":"3c1dfd8c31fce877a819381cd42d7b1c20af85aeea1b31822798ba0a0a07abbf"} Oct 02 21:38:02 crc kubenswrapper[4636]: I1002 21:38:02.485708 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"8a4be5c1ce75798917d21626c82cef345349985660c9d16a37e08980639d7ba1"} Oct 02 21:38:02 crc kubenswrapper[4636]: I1002 21:38:02.486104 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"4c334ec762ee9f007f37fd446df33175a3f35063f01cccc20b8a3775e807e01c"} Oct 02 21:38:02 crc kubenswrapper[4636]: I1002 21:38:02.486163 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2ngwv" event={"ID":"5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15","Type":"ContainerStarted","Data":"4322a8931417f009f15f59047f4e52ed606543bdb98b044486f01dfe85da34c0"} Oct 02 21:38:03 crc kubenswrapper[4636]: I1002 21:38:03.496489 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:38:03 crc kubenswrapper[4636]: I1002 21:38:03.542068 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/frr-k8s-2ngwv" podStartSLOduration=9.1145568 podStartE2EDuration="19.542038407s" podCreationTimestamp="2025-10-02 21:37:44 +0000 UTC" firstStartedPulling="2025-10-02 21:37:45.756459571 +0000 UTC m=+857.079467600" lastFinishedPulling="2025-10-02 21:37:56.183941188 +0000 UTC m=+867.506949207" observedRunningTime="2025-10-02 21:38:03.533408904 +0000 UTC m=+874.856416973" watchObservedRunningTime="2025-10-02 21:38:03.542038407 +0000 UTC m=+874.865046496" Oct 02 21:38:05 crc kubenswrapper[4636]: I1002 21:38:05.636625 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:38:05 crc kubenswrapper[4636]: I1002 21:38:05.682163 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:38:05 crc kubenswrapper[4636]: I1002 21:38:05.850233 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2gnb9"] Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.660701 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-c5h77"] Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.661649 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.666623 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c5h77"] Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.804723 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl2z7\" (UniqueName: \"kubernetes.io/projected/1c7d8548-5763-4dad-a80a-69027aef1f92-kube-api-access-cl2z7\") pod \"openstack-operator-index-c5h77\" (UID: \"1c7d8548-5763-4dad-a80a-69027aef1f92\") " pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.906011 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl2z7\" (UniqueName: \"kubernetes.io/projected/1c7d8548-5763-4dad-a80a-69027aef1f92-kube-api-access-cl2z7\") pod \"openstack-operator-index-c5h77\" (UID: \"1c7d8548-5763-4dad-a80a-69027aef1f92\") " pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.930621 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl2z7\" (UniqueName: \"kubernetes.io/projected/1c7d8548-5763-4dad-a80a-69027aef1f92-kube-api-access-cl2z7\") pod \"openstack-operator-index-c5h77\" (UID: \"1c7d8548-5763-4dad-a80a-69027aef1f92\") " pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:06 crc kubenswrapper[4636]: I1002 21:38:06.980515 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:07 crc kubenswrapper[4636]: I1002 21:38:07.630535 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c5h77"] Oct 02 21:38:07 crc kubenswrapper[4636]: W1002 21:38:07.667409 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c7d8548_5763_4dad_a80a_69027aef1f92.slice/crio-7b180742771b453d25238e1d80b6f22691ffab4f7ba61a7749c931f3c503f44e WatchSource:0}: Error finding container 7b180742771b453d25238e1d80b6f22691ffab4f7ba61a7749c931f3c503f44e: Status 404 returned error can't find the container with id 7b180742771b453d25238e1d80b6f22691ffab4f7ba61a7749c931f3c503f44e Oct 02 21:38:08 crc kubenswrapper[4636]: I1002 21:38:08.528208 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c5h77" event={"ID":"1c7d8548-5763-4dad-a80a-69027aef1f92","Type":"ContainerStarted","Data":"7b180742771b453d25238e1d80b6f22691ffab4f7ba61a7749c931f3c503f44e"} Oct 02 21:38:08 crc kubenswrapper[4636]: I1002 21:38:08.530303 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wcwv" event={"ID":"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea","Type":"ContainerStarted","Data":"36b82bd5f285669bcee999c8a371adb1153536af613eef809b06b976686fc0f9"} Oct 02 21:38:08 crc kubenswrapper[4636]: I1002 21:38:08.558584 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7wcwv" podStartSLOduration=3.618235012 podStartE2EDuration="13.558565288s" podCreationTimestamp="2025-10-02 21:37:55 +0000 UTC" firstStartedPulling="2025-10-02 21:37:57.436879512 +0000 UTC m=+868.759887541" lastFinishedPulling="2025-10-02 21:38:07.377209798 +0000 UTC m=+878.700217817" observedRunningTime="2025-10-02 21:38:08.554349744 +0000 UTC m=+879.877357753" watchObservedRunningTime="2025-10-02 21:38:08.558565288 +0000 UTC m=+879.881573317" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.472128 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r69tz"] Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.474455 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.480253 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r69tz"] Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.559385 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-catalog-content\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.559433 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvtxf\" (UniqueName: \"kubernetes.io/projected/766fba84-e2b3-445e-b4bd-f9c3695bc872-kube-api-access-fvtxf\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.559451 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-utilities\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.660638 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-catalog-content\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.660696 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvtxf\" (UniqueName: \"kubernetes.io/projected/766fba84-e2b3-445e-b4bd-f9c3695bc872-kube-api-access-fvtxf\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.660723 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-utilities\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.661215 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-catalog-content\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.661259 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-utilities\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.682863 4636 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fvtxf\" (UniqueName: \"kubernetes.io/projected/766fba84-e2b3-445e-b4bd-f9c3695bc872-kube-api-access-fvtxf\") pod \"redhat-marketplace-r69tz\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:09 crc kubenswrapper[4636]: I1002 21:38:09.861778 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.258166 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r69tz"] Oct 02 21:38:10 crc kubenswrapper[4636]: W1002 21:38:10.264308 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod766fba84_e2b3_445e_b4bd_f9c3695bc872.slice/crio-a654f5505db7e2b023f17b9a6770e2833759972a47e355c233073523341bac32 WatchSource:0}: Error finding container a654f5505db7e2b023f17b9a6770e2833759972a47e355c233073523341bac32: Status 404 returned error can't find the container with id a654f5505db7e2b023f17b9a6770e2833759972a47e355c233073523341bac32 Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.548709 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2gnb9" event={"ID":"b8489c3b-7f53-413f-a04e-2909054e5d28","Type":"ContainerStarted","Data":"7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439"} Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.548781 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-2gnb9" podUID="b8489c3b-7f53-413f-a04e-2909054e5d28" containerName="registry-server" containerID="cri-o://7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439" gracePeriod=2 Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.552228 4636 generic.go:334] "Generic (PLEG): container finished" podID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerID="7672790f98704470fb816d42e90334a5435eaa52431bab814eec4c127e7b0606" exitCode=0 Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.552272 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerDied","Data":"7672790f98704470fb816d42e90334a5435eaa52431bab814eec4c127e7b0606"} Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.552295 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerStarted","Data":"a654f5505db7e2b023f17b9a6770e2833759972a47e355c233073523341bac32"} Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.568420 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2gnb9" podStartSLOduration=1.632489918 podStartE2EDuration="10.568400885s" podCreationTimestamp="2025-10-02 21:38:00 +0000 UTC" firstStartedPulling="2025-10-02 21:38:01.235521326 +0000 UTC m=+872.558529345" lastFinishedPulling="2025-10-02 21:38:10.171432293 +0000 UTC m=+881.494440312" observedRunningTime="2025-10-02 21:38:10.567067559 +0000 UTC m=+881.890075598" watchObservedRunningTime="2025-10-02 21:38:10.568400885 +0000 UTC m=+881.891408904" Oct 02 21:38:10 crc kubenswrapper[4636]: I1002 21:38:10.798259 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.040993 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.179990 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndhgj\" (UniqueName: \"kubernetes.io/projected/b8489c3b-7f53-413f-a04e-2909054e5d28-kube-api-access-ndhgj\") pod \"b8489c3b-7f53-413f-a04e-2909054e5d28\" (UID: \"b8489c3b-7f53-413f-a04e-2909054e5d28\") " Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.187053 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8489c3b-7f53-413f-a04e-2909054e5d28-kube-api-access-ndhgj" (OuterVolumeSpecName: "kube-api-access-ndhgj") pod "b8489c3b-7f53-413f-a04e-2909054e5d28" (UID: "b8489c3b-7f53-413f-a04e-2909054e5d28"). InnerVolumeSpecName "kube-api-access-ndhgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.281316 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndhgj\" (UniqueName: \"kubernetes.io/projected/b8489c3b-7f53-413f-a04e-2909054e5d28-kube-api-access-ndhgj\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.563480 4636 generic.go:334] "Generic (PLEG): container finished" podID="b8489c3b-7f53-413f-a04e-2909054e5d28" containerID="7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439" exitCode=0 Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.563556 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2gnb9" event={"ID":"b8489c3b-7f53-413f-a04e-2909054e5d28","Type":"ContainerDied","Data":"7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439"} Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.563604 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2gnb9" event={"ID":"b8489c3b-7f53-413f-a04e-2909054e5d28","Type":"ContainerDied","Data":"82d69832a81feb1813394e8491c8698bd50abfcb99768fd7a05779caedf27b54"} Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.563682 4636 scope.go:117] "RemoveContainer" containerID="7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.564842 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c5h77" event={"ID":"1c7d8548-5763-4dad-a80a-69027aef1f92","Type":"ContainerStarted","Data":"e90dab765f967d424357a596e2944d5d7b3be60bccb7025199d73d784869dc64"} Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.564881 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2gnb9" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.587186 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-c5h77" podStartSLOduration=2.145351837 podStartE2EDuration="5.587172254s" podCreationTimestamp="2025-10-02 21:38:06 +0000 UTC" firstStartedPulling="2025-10-02 21:38:07.669149274 +0000 UTC m=+878.992157293" lastFinishedPulling="2025-10-02 21:38:11.110969691 +0000 UTC m=+882.433977710" observedRunningTime="2025-10-02 21:38:11.586357002 +0000 UTC m=+882.909365021" watchObservedRunningTime="2025-10-02 21:38:11.587172254 +0000 UTC m=+882.910180273" Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.609584 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2gnb9"] Oct 02 21:38:11 crc kubenswrapper[4636]: I1002 21:38:11.619204 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-2gnb9"] Oct 02 21:38:13 crc kubenswrapper[4636]: I1002 21:38:13.197931 4636 scope.go:117] "RemoveContainer" containerID="7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439" Oct 02 21:38:13 crc kubenswrapper[4636]: E1002 21:38:13.198457 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439\": container with ID starting with 7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439 not found: ID does not exist" containerID="7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439" Oct 02 21:38:13 crc kubenswrapper[4636]: I1002 21:38:13.198489 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439"} err="failed to get container status \"7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439\": rpc error: code = NotFound desc = could not find container \"7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439\": container with ID starting with 7082ed8836e891a3aa1212ca715792fcb866ed9eb68db64f44220e46b0188439 not found: ID does not exist" Oct 02 21:38:13 crc kubenswrapper[4636]: I1002 21:38:13.617502 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8489c3b-7f53-413f-a04e-2909054e5d28" path="/var/lib/kubelet/pods/b8489c3b-7f53-413f-a04e-2909054e5d28/volumes" Oct 02 21:38:15 crc kubenswrapper[4636]: I1002 21:38:15.639373 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2ngwv" Oct 02 21:38:15 crc kubenswrapper[4636]: I1002 21:38:15.651461 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-rrdgb" Oct 02 21:38:15 crc kubenswrapper[4636]: I1002 21:38:15.694414 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:38:15 crc kubenswrapper[4636]: I1002 21:38:15.694476 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:38:15 crc kubenswrapper[4636]: I1002 21:38:15.745517 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:38:16 crc kubenswrapper[4636]: I1002 
21:38:16.654350 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:38:16 crc kubenswrapper[4636]: I1002 21:38:16.980776 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:16 crc kubenswrapper[4636]: I1002 21:38:16.981051 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:17 crc kubenswrapper[4636]: I1002 21:38:17.006361 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:17 crc kubenswrapper[4636]: I1002 21:38:17.618675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerStarted","Data":"aed33af5bf6de3fd430d7f47d247c1e599efb55b332de9eb03d9d395b8734fba"} Oct 02 21:38:17 crc kubenswrapper[4636]: I1002 21:38:17.653638 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-c5h77" Oct 02 21:38:18 crc kubenswrapper[4636]: I1002 21:38:18.632279 4636 generic.go:334] "Generic (PLEG): container finished" podID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerID="aed33af5bf6de3fd430d7f47d247c1e599efb55b332de9eb03d9d395b8734fba" exitCode=0 Oct 02 21:38:18 crc kubenswrapper[4636]: I1002 21:38:18.632395 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerDied","Data":"aed33af5bf6de3fd430d7f47d247c1e599efb55b332de9eb03d9d395b8734fba"} Oct 02 21:38:18 crc kubenswrapper[4636]: I1002 21:38:18.890296 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7wcwv"] Oct 02 21:38:18 crc kubenswrapper[4636]: I1002 21:38:18.890489 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7wcwv" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="registry-server" containerID="cri-o://36b82bd5f285669bcee999c8a371adb1153536af613eef809b06b976686fc0f9" gracePeriod=2 Oct 02 21:38:20 crc kubenswrapper[4636]: I1002 21:38:20.651531 4636 generic.go:334] "Generic (PLEG): container finished" podID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerID="36b82bd5f285669bcee999c8a371adb1153536af613eef809b06b976686fc0f9" exitCode=0 Oct 02 21:38:20 crc kubenswrapper[4636]: I1002 21:38:20.651700 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wcwv" event={"ID":"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea","Type":"ContainerDied","Data":"36b82bd5f285669bcee999c8a371adb1153536af613eef809b06b976686fc0f9"} Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.205802 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.319959 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-utilities\") pod \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.320011 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-catalog-content\") pod \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.320055 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-527c9\" (UniqueName: \"kubernetes.io/projected/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-kube-api-access-527c9\") pod \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\" (UID: \"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea\") " Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.321050 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-utilities" (OuterVolumeSpecName: "utilities") pod "41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" (UID: "41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.324824 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-kube-api-access-527c9" (OuterVolumeSpecName: "kube-api-access-527c9") pod "41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" (UID: "41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea"). InnerVolumeSpecName "kube-api-access-527c9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.360122 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" (UID: "41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.421862 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.421891 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.421901 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-527c9\" (UniqueName: \"kubernetes.io/projected/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea-kube-api-access-527c9\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.659253 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wcwv" event={"ID":"41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea","Type":"ContainerDied","Data":"7301ef59cac34baa233cc47db6b96005efbe8d6db6154dc9990b7991e12e4e5d"} Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.659320 4636 scope.go:117] "RemoveContainer" containerID="36b82bd5f285669bcee999c8a371adb1153536af613eef809b06b976686fc0f9" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.659441 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7wcwv" Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.680743 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7wcwv"] Oct 02 21:38:21 crc kubenswrapper[4636]: I1002 21:38:21.683998 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7wcwv"] Oct 02 21:38:22 crc kubenswrapper[4636]: I1002 21:38:22.041826 4636 scope.go:117] "RemoveContainer" containerID="3c1dfd8c31fce877a819381cd42d7b1c20af85aeea1b31822798ba0a0a07abbf" Oct 02 21:38:22 crc kubenswrapper[4636]: I1002 21:38:22.244502 4636 scope.go:117] "RemoveContainer" containerID="b9ca47808466f5ce64f97d26cc2a27955525afa4aadd0264bb27ee192ba76fb1" Oct 02 21:38:22 crc kubenswrapper[4636]: I1002 21:38:22.669651 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerStarted","Data":"ef9b5a76d00c9405f23fdcb5a40bec341fb6ac60a0b2690cbafee3281e7c1db5"} Oct 02 21:38:22 crc kubenswrapper[4636]: I1002 21:38:22.693104 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r69tz" podStartSLOduration=2.002658955 podStartE2EDuration="13.693086755s" podCreationTimestamp="2025-10-02 21:38:09 +0000 UTC" firstStartedPulling="2025-10-02 21:38:10.553918404 +0000 UTC m=+881.876926423" lastFinishedPulling="2025-10-02 21:38:22.244346164 +0000 UTC m=+893.567354223" observedRunningTime="2025-10-02 21:38:22.690494645 +0000 UTC m=+894.013502664" watchObservedRunningTime="2025-10-02 21:38:22.693086755 +0000 UTC m=+894.016094784" Oct 02 21:38:23 crc kubenswrapper[4636]: I1002 21:38:23.117962 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" start-of-body= Oct 02 21:38:23 crc kubenswrapper[4636]: I1002 21:38:23.118048 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:38:23 crc kubenswrapper[4636]: I1002 21:38:23.613338 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" path="/var/lib/kubelet/pods/41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea/volumes" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085266 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n"] Oct 02 21:38:24 crc kubenswrapper[4636]: E1002 21:38:24.085614 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="extract-content" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085634 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="extract-content" Oct 02 21:38:24 crc kubenswrapper[4636]: E1002 21:38:24.085650 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="extract-utilities" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085663 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="extract-utilities" Oct 02 21:38:24 crc kubenswrapper[4636]: E1002 21:38:24.085689 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8489c3b-7f53-413f-a04e-2909054e5d28" containerName="registry-server" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085702 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8489c3b-7f53-413f-a04e-2909054e5d28" containerName="registry-server" Oct 02 21:38:24 crc kubenswrapper[4636]: E1002 21:38:24.085726 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="registry-server" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085736 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="registry-server" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085925 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c07b36-34a8-4866-bbc9-7ccbb5cbf4ea" containerName="registry-server" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.085948 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8489c3b-7f53-413f-a04e-2909054e5d28" containerName="registry-server" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.087313 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.089508 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-w5g66" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.096930 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n"] Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.261206 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-util\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.261261 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzcdz\" (UniqueName: \"kubernetes.io/projected/bbe6de43-e02c-4c6a-87aa-226f2385afc6-kube-api-access-pzcdz\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.261411 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-bundle\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.362940 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-bundle\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.362997 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-util\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.363030 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzcdz\" (UniqueName: \"kubernetes.io/projected/bbe6de43-e02c-4c6a-87aa-226f2385afc6-kube-api-access-pzcdz\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.363488 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-util\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.363517 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-bundle\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.379565 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzcdz\" (UniqueName: \"kubernetes.io/projected/bbe6de43-e02c-4c6a-87aa-226f2385afc6-kube-api-access-pzcdz\") pod \"62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.403589 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:24 crc kubenswrapper[4636]: I1002 21:38:24.837914 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n"] Oct 02 21:38:25 crc kubenswrapper[4636]: I1002 21:38:25.687618 4636 generic.go:334] "Generic (PLEG): container finished" podID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerID="22b67f6872261865ab9e75e4c3384e74b27751317bed1cb2923ffdb9967c4b00" exitCode=0 Oct 02 21:38:25 crc kubenswrapper[4636]: I1002 21:38:25.688012 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" event={"ID":"bbe6de43-e02c-4c6a-87aa-226f2385afc6","Type":"ContainerDied","Data":"22b67f6872261865ab9e75e4c3384e74b27751317bed1cb2923ffdb9967c4b00"} Oct 02 21:38:25 crc kubenswrapper[4636]: I1002 21:38:25.688124 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" event={"ID":"bbe6de43-e02c-4c6a-87aa-226f2385afc6","Type":"ContainerStarted","Data":"5af9c2812418e0a1cb33ddb5d8153d763891dca9d487a275b559c0ed3c7aefd1"} Oct 02 21:38:29 crc kubenswrapper[4636]: I1002 21:38:29.862785 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:29 crc kubenswrapper[4636]: I1002 21:38:29.863153 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:29 crc kubenswrapper[4636]: I1002 21:38:29.911692 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:30 crc kubenswrapper[4636]: I1002 21:38:30.792590 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:31 crc kubenswrapper[4636]: I1002 21:38:31.722263 4636 generic.go:334] "Generic (PLEG): container finished" podID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" 
containerID="30d7b286cf38943490e1bdabc5a4c0ab4f548253f3a18d4d4e9839f698636f85" exitCode=0 Oct 02 21:38:31 crc kubenswrapper[4636]: I1002 21:38:31.722345 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" event={"ID":"bbe6de43-e02c-4c6a-87aa-226f2385afc6","Type":"ContainerDied","Data":"30d7b286cf38943490e1bdabc5a4c0ab4f548253f3a18d4d4e9839f698636f85"} Oct 02 21:38:32 crc kubenswrapper[4636]: I1002 21:38:32.733032 4636 generic.go:334] "Generic (PLEG): container finished" podID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerID="e7a41569c80f5166ef801f8f13261d6481e621d7ca084f8693afa12101702528" exitCode=0 Oct 02 21:38:32 crc kubenswrapper[4636]: I1002 21:38:32.733070 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" event={"ID":"bbe6de43-e02c-4c6a-87aa-226f2385afc6","Type":"ContainerDied","Data":"e7a41569c80f5166ef801f8f13261d6481e621d7ca084f8693afa12101702528"} Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.071109 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.204114 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-util\") pod \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.204164 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzcdz\" (UniqueName: \"kubernetes.io/projected/bbe6de43-e02c-4c6a-87aa-226f2385afc6-kube-api-access-pzcdz\") pod \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.204219 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-bundle\") pod \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\" (UID: \"bbe6de43-e02c-4c6a-87aa-226f2385afc6\") " Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.205520 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-bundle" (OuterVolumeSpecName: "bundle") pod "bbe6de43-e02c-4c6a-87aa-226f2385afc6" (UID: "bbe6de43-e02c-4c6a-87aa-226f2385afc6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.213632 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbe6de43-e02c-4c6a-87aa-226f2385afc6-kube-api-access-pzcdz" (OuterVolumeSpecName: "kube-api-access-pzcdz") pod "bbe6de43-e02c-4c6a-87aa-226f2385afc6" (UID: "bbe6de43-e02c-4c6a-87aa-226f2385afc6"). InnerVolumeSpecName "kube-api-access-pzcdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.214506 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-util" (OuterVolumeSpecName: "util") pod "bbe6de43-e02c-4c6a-87aa-226f2385afc6" (UID: "bbe6de43-e02c-4c6a-87aa-226f2385afc6"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.305986 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzcdz\" (UniqueName: \"kubernetes.io/projected/bbe6de43-e02c-4c6a-87aa-226f2385afc6-kube-api-access-pzcdz\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.306026 4636 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.306036 4636 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bbe6de43-e02c-4c6a-87aa-226f2385afc6-util\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.745728 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" event={"ID":"bbe6de43-e02c-4c6a-87aa-226f2385afc6","Type":"ContainerDied","Data":"5af9c2812418e0a1cb33ddb5d8153d763891dca9d487a275b559c0ed3c7aefd1"} Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.745778 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5af9c2812418e0a1cb33ddb5d8153d763891dca9d487a275b559c0ed3c7aefd1" Oct 02 21:38:34 crc kubenswrapper[4636]: I1002 21:38:34.745789 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.063566 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r69tz"] Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.064313 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r69tz" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="registry-server" containerID="cri-o://ef9b5a76d00c9405f23fdcb5a40bec341fb6ac60a0b2690cbafee3281e7c1db5" gracePeriod=2 Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.767860 4636 generic.go:334] "Generic (PLEG): container finished" podID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerID="ef9b5a76d00c9405f23fdcb5a40bec341fb6ac60a0b2690cbafee3281e7c1db5" exitCode=0 Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.767923 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerDied","Data":"ef9b5a76d00c9405f23fdcb5a40bec341fb6ac60a0b2690cbafee3281e7c1db5"} Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.818262 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r"] Oct 02 21:38:37 crc kubenswrapper[4636]: E1002 21:38:37.818848 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerName="extract" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.818871 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerName="extract" Oct 02 21:38:37 crc kubenswrapper[4636]: E1002 21:38:37.818898 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" 
containerName="util" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.818906 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerName="util" Oct 02 21:38:37 crc kubenswrapper[4636]: E1002 21:38:37.818926 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerName="pull" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.818934 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerName="pull" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.819073 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbe6de43-e02c-4c6a-87aa-226f2385afc6" containerName="extract" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.825727 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.833775 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qk2rh" Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.847212 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r"] Oct 02 21:38:37 crc kubenswrapper[4636]: I1002 21:38:37.961030 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6rrq\" (UniqueName: \"kubernetes.io/projected/59487ce5-9c6e-41d8-9b9f-ee966b0579e4-kube-api-access-f6rrq\") pod \"openstack-operator-controller-operator-69cfffd9c8-f6h7r\" (UID: \"59487ce5-9c6e-41d8-9b9f-ee966b0579e4\") " pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.062519 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6rrq\" (UniqueName: \"kubernetes.io/projected/59487ce5-9c6e-41d8-9b9f-ee966b0579e4-kube-api-access-f6rrq\") pod \"openstack-operator-controller-operator-69cfffd9c8-f6h7r\" (UID: \"59487ce5-9c6e-41d8-9b9f-ee966b0579e4\") " pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.084301 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6rrq\" (UniqueName: \"kubernetes.io/projected/59487ce5-9c6e-41d8-9b9f-ee966b0579e4-kube-api-access-f6rrq\") pod \"openstack-operator-controller-operator-69cfffd9c8-f6h7r\" (UID: \"59487ce5-9c6e-41d8-9b9f-ee966b0579e4\") " pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.084346 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.144382 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.266717 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-catalog-content\") pod \"766fba84-e2b3-445e-b4bd-f9c3695bc872\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.267272 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-utilities\") pod \"766fba84-e2b3-445e-b4bd-f9c3695bc872\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.267317 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvtxf\" (UniqueName: \"kubernetes.io/projected/766fba84-e2b3-445e-b4bd-f9c3695bc872-kube-api-access-fvtxf\") pod \"766fba84-e2b3-445e-b4bd-f9c3695bc872\" (UID: \"766fba84-e2b3-445e-b4bd-f9c3695bc872\") " Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.273388 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-utilities" (OuterVolumeSpecName: "utilities") pod "766fba84-e2b3-445e-b4bd-f9c3695bc872" (UID: "766fba84-e2b3-445e-b4bd-f9c3695bc872"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.276405 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/766fba84-e2b3-445e-b4bd-f9c3695bc872-kube-api-access-fvtxf" (OuterVolumeSpecName: "kube-api-access-fvtxf") pod "766fba84-e2b3-445e-b4bd-f9c3695bc872" (UID: "766fba84-e2b3-445e-b4bd-f9c3695bc872"). InnerVolumeSpecName "kube-api-access-fvtxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.310731 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "766fba84-e2b3-445e-b4bd-f9c3695bc872" (UID: "766fba84-e2b3-445e-b4bd-f9c3695bc872"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.369156 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.369197 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/766fba84-e2b3-445e-b4bd-f9c3695bc872-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.369210 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvtxf\" (UniqueName: \"kubernetes.io/projected/766fba84-e2b3-445e-b4bd-f9c3695bc872-kube-api-access-fvtxf\") on node \"crc\" DevicePath \"\"" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.633133 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r"] Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.782962 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r69tz" event={"ID":"766fba84-e2b3-445e-b4bd-f9c3695bc872","Type":"ContainerDied","Data":"a654f5505db7e2b023f17b9a6770e2833759972a47e355c233073523341bac32"} Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.783019 4636 scope.go:117] "RemoveContainer" containerID="ef9b5a76d00c9405f23fdcb5a40bec341fb6ac60a0b2690cbafee3281e7c1db5" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.783023 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r69tz" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.784629 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" event={"ID":"59487ce5-9c6e-41d8-9b9f-ee966b0579e4","Type":"ContainerStarted","Data":"4e25240a2308b4414038bfbddfbbc4e6412c36d974c4f2960e20674bb79808d2"} Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.810800 4636 scope.go:117] "RemoveContainer" containerID="aed33af5bf6de3fd430d7f47d247c1e599efb55b332de9eb03d9d395b8734fba" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.840975 4636 scope.go:117] "RemoveContainer" containerID="7672790f98704470fb816d42e90334a5435eaa52431bab814eec4c127e7b0606" Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.843293 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r69tz"] Oct 02 21:38:38 crc kubenswrapper[4636]: I1002 21:38:38.850286 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r69tz"] Oct 02 21:38:39 crc kubenswrapper[4636]: I1002 21:38:39.639106 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" path="/var/lib/kubelet/pods/766fba84-e2b3-445e-b4bd-f9c3695bc872/volumes" Oct 02 21:38:45 crc kubenswrapper[4636]: I1002 21:38:45.830774 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" event={"ID":"59487ce5-9c6e-41d8-9b9f-ee966b0579e4","Type":"ContainerStarted","Data":"98928294bf544a054baec1c74fdea7bb5f05ab6eaff1f595f6e8cf980bcf14c5"} Oct 02 21:38:47 crc kubenswrapper[4636]: I1002 21:38:47.846066 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" event={"ID":"59487ce5-9c6e-41d8-9b9f-ee966b0579e4","Type":"ContainerStarted","Data":"04f50651fd2a363b02257a736da4d1e6b83b7557a0e5a52c5fd180b530d0c6c1"} Oct 02 21:38:47 crc kubenswrapper[4636]: I1002 21:38:47.847596 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:38:47 crc kubenswrapper[4636]: I1002 21:38:47.877222 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" podStartSLOduration=2.2765298019999998 podStartE2EDuration="10.877205784s" podCreationTimestamp="2025-10-02 21:38:37 +0000 UTC" firstStartedPulling="2025-10-02 21:38:38.656437452 +0000 UTC m=+909.979445471" lastFinishedPulling="2025-10-02 21:38:47.257113434 +0000 UTC m=+918.580121453" observedRunningTime="2025-10-02 21:38:47.876432863 +0000 UTC m=+919.199440902" watchObservedRunningTime="2025-10-02 21:38:47.877205784 +0000 UTC m=+919.200213803" Oct 02 21:38:53 crc kubenswrapper[4636]: I1002 21:38:53.117804 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:38:53 crc kubenswrapper[4636]: I1002 21:38:53.119898 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:38:58 crc kubenswrapper[4636]: I1002 21:38:58.147704 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-69cfffd9c8-f6h7r" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.000780 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz"] Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.007148 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="registry-server" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.007381 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="registry-server" Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.007503 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="extract-content" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.007856 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="extract-content" Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.007983 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="extract-utilities" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.008140 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="extract-utilities" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.010201 4636 
memory_manager.go:354] "RemoveStaleState removing state" podUID="766fba84-e2b3-445e-b4bd-f9c3695bc872" containerName="registry-server" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.011095 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.014825 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.015613 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-cfq9c" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.016682 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.019571 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-2qhz4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.036393 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.048945 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.054007 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.055166 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.059143 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-44hdb" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.072493 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.073411 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.075615 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-7x5lv" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.121827 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.126533 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-7ntw8"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.127668 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.130660 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-l58sr" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.133446 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-899wq\" (UniqueName: \"kubernetes.io/projected/b29f844c-ab64-4edb-9dee-0d19e4cb78d0-kube-api-access-899wq\") pod \"barbican-operator-controller-manager-6c675fb79f-b52pz\" (UID: \"b29f844c-ab64-4edb-9dee-0d19e4cb78d0\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.133498 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwpd5\" (UniqueName: \"kubernetes.io/projected/330ebc0e-6515-4a61-9d41-43f11247c659-kube-api-access-lwpd5\") pod \"designate-operator-controller-manager-75dfd9b554-r4gzg\" (UID: \"330ebc0e-6515-4a61-9d41-43f11247c659\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.133534 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhkdv\" (UniqueName: \"kubernetes.io/projected/405ad6d4-7f81-48b8-a8a9-ae1d8062f078-kube-api-access-jhkdv\") pod \"glance-operator-controller-manager-846dff85b5-lc2st\" (UID: \"405ad6d4-7f81-48b8-a8a9-ae1d8062f078\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.133554 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw26w\" (UniqueName: \"kubernetes.io/projected/26738e55-423c-4cab-821d-b9abb0d8a026-kube-api-access-kw26w\") pod \"cinder-operator-controller-manager-79d68d6c85-l889h\" (UID: \"26738e55-423c-4cab-821d-b9abb0d8a026\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.146176 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-7ntw8"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.153593 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.167563 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.168487 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.181064 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-n2mz2" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.188611 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.189601 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.192331 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.192848 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-tzrk8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.195375 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.214284 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.234382 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j4tz\" (UniqueName: \"kubernetes.io/projected/28a9b36b-5645-4d73-883a-87028af9455f-kube-api-access-2j4tz\") pod \"heat-operator-controller-manager-599898f689-7ntw8\" (UID: \"28a9b36b-5645-4d73-883a-87028af9455f\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.234438 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-899wq\" (UniqueName: \"kubernetes.io/projected/b29f844c-ab64-4edb-9dee-0d19e4cb78d0-kube-api-access-899wq\") pod \"barbican-operator-controller-manager-6c675fb79f-b52pz\" (UID: \"b29f844c-ab64-4edb-9dee-0d19e4cb78d0\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.234478 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwpd5\" (UniqueName: \"kubernetes.io/projected/330ebc0e-6515-4a61-9d41-43f11247c659-kube-api-access-lwpd5\") pod \"designate-operator-controller-manager-75dfd9b554-r4gzg\" (UID: \"330ebc0e-6515-4a61-9d41-43f11247c659\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.234508 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhkdv\" (UniqueName: \"kubernetes.io/projected/405ad6d4-7f81-48b8-a8a9-ae1d8062f078-kube-api-access-jhkdv\") pod \"glance-operator-controller-manager-846dff85b5-lc2st\" (UID: \"405ad6d4-7f81-48b8-a8a9-ae1d8062f078\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.234530 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw26w\" (UniqueName: \"kubernetes.io/projected/26738e55-423c-4cab-821d-b9abb0d8a026-kube-api-access-kw26w\") pod \"cinder-operator-controller-manager-79d68d6c85-l889h\" (UID: \"26738e55-423c-4cab-821d-b9abb0d8a026\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.234553 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxn7r\" (UniqueName: \"kubernetes.io/projected/a10a56aa-5ebe-4f98-8f99-fcba515c254d-kube-api-access-dxn7r\") pod 
\"horizon-operator-controller-manager-6769b867d9-d74cn\" (UID: \"a10a56aa-5ebe-4f98-8f99-fcba515c254d\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.291095 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.336119 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.293011 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwpd5\" (UniqueName: \"kubernetes.io/projected/330ebc0e-6515-4a61-9d41-43f11247c659-kube-api-access-lwpd5\") pod \"designate-operator-controller-manager-75dfd9b554-r4gzg\" (UID: \"330ebc0e-6515-4a61-9d41-43f11247c659\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.318870 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-899wq\" (UniqueName: \"kubernetes.io/projected/b29f844c-ab64-4edb-9dee-0d19e4cb78d0-kube-api-access-899wq\") pod \"barbican-operator-controller-manager-6c675fb79f-b52pz\" (UID: \"b29f844c-ab64-4edb-9dee-0d19e4cb78d0\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.319635 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw26w\" (UniqueName: \"kubernetes.io/projected/26738e55-423c-4cab-821d-b9abb0d8a026-kube-api-access-kw26w\") pod \"cinder-operator-controller-manager-79d68d6c85-l889h\" (UID: \"26738e55-423c-4cab-821d-b9abb0d8a026\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.336580 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.311234 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhkdv\" (UniqueName: \"kubernetes.io/projected/405ad6d4-7f81-48b8-a8a9-ae1d8062f078-kube-api-access-jhkdv\") pod \"glance-operator-controller-manager-846dff85b5-lc2st\" (UID: \"405ad6d4-7f81-48b8-a8a9-ae1d8062f078\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.370741 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.383224 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.383815 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-njrlc" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.384283 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxn7r\" (UniqueName: \"kubernetes.io/projected/a10a56aa-5ebe-4f98-8f99-fcba515c254d-kube-api-access-dxn7r\") pod \"horizon-operator-controller-manager-6769b867d9-d74cn\" (UID: \"a10a56aa-5ebe-4f98-8f99-fcba515c254d\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.384362 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srfzk\" (UniqueName: \"kubernetes.io/projected/994cd5bb-107d-4426-9549-f5805479b1d7-kube-api-access-srfzk\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.384432 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j4tz\" (UniqueName: \"kubernetes.io/projected/28a9b36b-5645-4d73-883a-87028af9455f-kube-api-access-2j4tz\") pod \"heat-operator-controller-manager-599898f689-7ntw8\" (UID: \"28a9b36b-5645-4d73-883a-87028af9455f\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.384462 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/994cd5bb-107d-4426-9549-f5805479b1d7-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.385021 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.386725 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.387557 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.387691 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.395080 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-pnxb4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.396701 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-vqdl9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.396842 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.398067 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.401363 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4l6j9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.406658 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.407341 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.444190 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxn7r\" (UniqueName: \"kubernetes.io/projected/a10a56aa-5ebe-4f98-8f99-fcba515c254d-kube-api-access-dxn7r\") pod \"horizon-operator-controller-manager-6769b867d9-d74cn\" (UID: \"a10a56aa-5ebe-4f98-8f99-fcba515c254d\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.449037 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j4tz\" (UniqueName: \"kubernetes.io/projected/28a9b36b-5645-4d73-883a-87028af9455f-kube-api-access-2j4tz\") pod \"heat-operator-controller-manager-599898f689-7ntw8\" (UID: \"28a9b36b-5645-4d73-883a-87028af9455f\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.456966 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.465541 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.465583 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.468283 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.469295 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.472555 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-h9zbx" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.477340 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.479030 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.482253 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.482337 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.488771 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-frx45" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.488917 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.489529 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxffp\" (UniqueName: \"kubernetes.io/projected/a6a3569c-020b-4cf3-8895-8f31e98bae75-kube-api-access-mxffp\") pod \"keystone-operator-controller-manager-7f55849f88-ccnnb\" (UID: \"a6a3569c-020b-4cf3-8895-8f31e98bae75\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.489575 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkmfh\" (UniqueName: \"kubernetes.io/projected/1f1e85fe-02dd-423c-991d-fafb30119601-kube-api-access-zkmfh\") pod \"manila-operator-controller-manager-6fd6854b49-h8kh7\" (UID: \"1f1e85fe-02dd-423c-991d-fafb30119601\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.489599 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jhjh\" (UniqueName: \"kubernetes.io/projected/a7b69bb7-2b74-4724-b459-215b7d515840-kube-api-access-2jhjh\") pod \"ironic-operator-controller-manager-84bc9db6cc-dpx7v\" (UID: \"a7b69bb7-2b74-4724-b459-215b7d515840\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.489635 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srfzk\" (UniqueName: \"kubernetes.io/projected/994cd5bb-107d-4426-9549-f5805479b1d7-kube-api-access-srfzk\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " 
pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.489671 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/994cd5bb-107d-4426-9549-f5805479b1d7-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.490581 4636 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.490977 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz"] Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.491354 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/994cd5bb-107d-4426-9549-f5805479b1d7-cert podName:994cd5bb-107d-4426-9549-f5805479b1d7 nodeName:}" failed. No retries permitted until 2025-10-02 21:39:20.991336063 +0000 UTC m=+952.314344082 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/994cd5bb-107d-4426-9549-f5805479b1d7-cert") pod "infra-operator-controller-manager-5fbf469cd7-7vrb4" (UID: "994cd5bb-107d-4426-9549-f5805479b1d7") : secret "infra-operator-webhook-server-cert" not found Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.492069 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.495310 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.502065 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-2tp2j" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.506477 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.548122 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.548991 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.556825 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.557026 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-w29px" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.557397 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.558380 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.558899 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srfzk\" (UniqueName: \"kubernetes.io/projected/994cd5bb-107d-4426-9549-f5805479b1d7-kube-api-access-srfzk\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.564484 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-s2cx8" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590792 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8m4j\" (UniqueName: \"kubernetes.io/projected/a0b38373-c0c9-4fd9-ad1b-d7af382f370b-kube-api-access-r8m4j\") pod \"mariadb-operator-controller-manager-5c468bf4d4-j7mxj\" (UID: \"a0b38373-c0c9-4fd9-ad1b-d7af382f370b\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590844 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxffp\" (UniqueName: \"kubernetes.io/projected/a6a3569c-020b-4cf3-8895-8f31e98bae75-kube-api-access-mxffp\") pod \"keystone-operator-controller-manager-7f55849f88-ccnnb\" (UID: \"a6a3569c-020b-4cf3-8895-8f31e98bae75\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590871 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcddx\" (UniqueName: \"kubernetes.io/projected/8bf57ebc-7603-43a7-8bad-f52c3ad98ecf-kube-api-access-mcddx\") pod \"octavia-operator-controller-manager-59d6cfdf45-spqgz\" (UID: \"8bf57ebc-7603-43a7-8bad-f52c3ad98ecf\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590901 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkmfh\" (UniqueName: \"kubernetes.io/projected/1f1e85fe-02dd-423c-991d-fafb30119601-kube-api-access-zkmfh\") pod \"manila-operator-controller-manager-6fd6854b49-h8kh7\" (UID: \"1f1e85fe-02dd-423c-991d-fafb30119601\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590919 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jhjh\" (UniqueName: \"kubernetes.io/projected/a7b69bb7-2b74-4724-b459-215b7d515840-kube-api-access-2jhjh\") pod \"ironic-operator-controller-manager-84bc9db6cc-dpx7v\" (UID: \"a7b69bb7-2b74-4724-b459-215b7d515840\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590949 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vqf5\" (UniqueName: \"kubernetes.io/projected/a50690d6-0e31-4944-9011-35076543304f-kube-api-access-4vqf5\") pod \"neutron-operator-controller-manager-6574bf987d-zjsn9\" (UID: \"a50690d6-0e31-4944-9011-35076543304f\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.590978 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5f4g\" (UniqueName: \"kubernetes.io/projected/20dffa00-5117-4d3e-8b67-467357444816-kube-api-access-p5f4g\") pod \"nova-operator-controller-manager-555c7456bd-z4lx2\" (UID: \"20dffa00-5117-4d3e-8b67-467357444816\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.593881 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.594990 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.598553 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.598739 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-fcc2s" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.617049 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jhjh\" (UniqueName: \"kubernetes.io/projected/a7b69bb7-2b74-4724-b459-215b7d515840-kube-api-access-2jhjh\") pod \"ironic-operator-controller-manager-84bc9db6cc-dpx7v\" (UID: \"a7b69bb7-2b74-4724-b459-215b7d515840\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.620602 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.621535 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxffp\" (UniqueName: \"kubernetes.io/projected/a6a3569c-020b-4cf3-8895-8f31e98bae75-kube-api-access-mxffp\") pod \"keystone-operator-controller-manager-7f55849f88-ccnnb\" (UID: \"a6a3569c-020b-4cf3-8895-8f31e98bae75\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.631306 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkmfh\" (UniqueName: \"kubernetes.io/projected/1f1e85fe-02dd-423c-991d-fafb30119601-kube-api-access-zkmfh\") pod 
\"manila-operator-controller-manager-6fd6854b49-h8kh7\" (UID: \"1f1e85fe-02dd-423c-991d-fafb30119601\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.659614 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.693202 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694487 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694695 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vqf5\" (UniqueName: \"kubernetes.io/projected/a50690d6-0e31-4944-9011-35076543304f-kube-api-access-4vqf5\") pod \"neutron-operator-controller-manager-6574bf987d-zjsn9\" (UID: \"a50690d6-0e31-4944-9011-35076543304f\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694739 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmbg4\" (UniqueName: \"kubernetes.io/projected/8be5d41d-4585-496e-b2a1-1f6514902e5a-kube-api-access-vmbg4\") pod \"placement-operator-controller-manager-7d8bb7f44c-j7dds\" (UID: \"8be5d41d-4585-496e-b2a1-1f6514902e5a\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694779 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5f4g\" (UniqueName: \"kubernetes.io/projected/20dffa00-5117-4d3e-8b67-467357444816-kube-api-access-p5f4g\") pod \"nova-operator-controller-manager-555c7456bd-z4lx2\" (UID: \"20dffa00-5117-4d3e-8b67-467357444816\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694822 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8m4j\" (UniqueName: \"kubernetes.io/projected/a0b38373-c0c9-4fd9-ad1b-d7af382f370b-kube-api-access-r8m4j\") pod \"mariadb-operator-controller-manager-5c468bf4d4-j7mxj\" (UID: \"a0b38373-c0c9-4fd9-ad1b-d7af382f370b\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694844 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp7cn\" (UniqueName: \"kubernetes.io/projected/46307289-bc17-4194-8ffa-e0cccfbb675c-kube-api-access-kp7cn\") pod \"ovn-operator-controller-manager-688db7b6c7-4gd5n\" (UID: \"46307289-bc17-4194-8ffa-e0cccfbb675c\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694872 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8281915-fca8-4081-9240-78d83b1fb453-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694899 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcddx\" (UniqueName: \"kubernetes.io/projected/8bf57ebc-7603-43a7-8bad-f52c3ad98ecf-kube-api-access-mcddx\") pod \"octavia-operator-controller-manager-59d6cfdf45-spqgz\" (UID: \"8bf57ebc-7603-43a7-8bad-f52c3ad98ecf\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.694929 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m7fw\" (UniqueName: \"kubernetes.io/projected/b8281915-fca8-4081-9240-78d83b1fb453-kube-api-access-8m7fw\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.702212 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.710123 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-jxw4z" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.720849 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.748900 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5f4g\" (UniqueName: \"kubernetes.io/projected/20dffa00-5117-4d3e-8b67-467357444816-kube-api-access-p5f4g\") pod \"nova-operator-controller-manager-555c7456bd-z4lx2\" (UID: \"20dffa00-5117-4d3e-8b67-467357444816\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.753469 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vqf5\" (UniqueName: \"kubernetes.io/projected/a50690d6-0e31-4944-9011-35076543304f-kube-api-access-4vqf5\") pod \"neutron-operator-controller-manager-6574bf987d-zjsn9\" (UID: \"a50690d6-0e31-4944-9011-35076543304f\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.759212 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcddx\" (UniqueName: \"kubernetes.io/projected/8bf57ebc-7603-43a7-8bad-f52c3ad98ecf-kube-api-access-mcddx\") pod \"octavia-operator-controller-manager-59d6cfdf45-spqgz\" (UID: \"8bf57ebc-7603-43a7-8bad-f52c3ad98ecf\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.759545 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.768551 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8m4j\" (UniqueName: \"kubernetes.io/projected/a0b38373-c0c9-4fd9-ad1b-d7af382f370b-kube-api-access-r8m4j\") pod \"mariadb-operator-controller-manager-5c468bf4d4-j7mxj\" (UID: \"a0b38373-c0c9-4fd9-ad1b-d7af382f370b\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.779803 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.780852 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.790677 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-nzhff" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.794118 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.795514 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlc2z\" (UniqueName: \"kubernetes.io/projected/c82f218c-6788-4558-b8b0-a375592d9377-kube-api-access-tlc2z\") pod \"swift-operator-controller-manager-6859f9b676-bqhzt\" (UID: \"c82f218c-6788-4558-b8b0-a375592d9377\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.795550 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmbg4\" (UniqueName: \"kubernetes.io/projected/8be5d41d-4585-496e-b2a1-1f6514902e5a-kube-api-access-vmbg4\") pod \"placement-operator-controller-manager-7d8bb7f44c-j7dds\" (UID: \"8be5d41d-4585-496e-b2a1-1f6514902e5a\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.795598 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp7cn\" (UniqueName: \"kubernetes.io/projected/46307289-bc17-4194-8ffa-e0cccfbb675c-kube-api-access-kp7cn\") pod \"ovn-operator-controller-manager-688db7b6c7-4gd5n\" (UID: \"46307289-bc17-4194-8ffa-e0cccfbb675c\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.795626 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8281915-fca8-4081-9240-78d83b1fb453-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.795663 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m7fw\" (UniqueName: \"kubernetes.io/projected/b8281915-fca8-4081-9240-78d83b1fb453-kube-api-access-8m7fw\") pod 
\"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.796234 4636 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 02 21:39:20 crc kubenswrapper[4636]: E1002 21:39:20.796272 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8281915-fca8-4081-9240-78d83b1fb453-cert podName:b8281915-fca8-4081-9240-78d83b1fb453 nodeName:}" failed. No retries permitted until 2025-10-02 21:39:21.29625873 +0000 UTC m=+952.619266749 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8281915-fca8-4081-9240-78d83b1fb453-cert") pod "openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" (UID: "b8281915-fca8-4081-9240-78d83b1fb453") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.811973 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.839993 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.844057 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp7cn\" (UniqueName: \"kubernetes.io/projected/46307289-bc17-4194-8ffa-e0cccfbb675c-kube-api-access-kp7cn\") pod \"ovn-operator-controller-manager-688db7b6c7-4gd5n\" (UID: \"46307289-bc17-4194-8ffa-e0cccfbb675c\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.858634 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m7fw\" (UniqueName: \"kubernetes.io/projected/b8281915-fca8-4081-9240-78d83b1fb453-kube-api-access-8m7fw\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.858725 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmbg4\" (UniqueName: \"kubernetes.io/projected/8be5d41d-4585-496e-b2a1-1f6514902e5a-kube-api-access-vmbg4\") pod \"placement-operator-controller-manager-7d8bb7f44c-j7dds\" (UID: \"8be5d41d-4585-496e-b2a1-1f6514902e5a\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.865050 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.890413 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.899856 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6tf2\" (UniqueName: \"kubernetes.io/projected/e439d19a-9b7e-4245-baa8-1fb4a6c1f56a-kube-api-access-l6tf2\") pod \"telemetry-operator-controller-manager-5db5cf686f-j44cm\" (UID: \"e439d19a-9b7e-4245-baa8-1fb4a6c1f56a\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.899907 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlc2z\" (UniqueName: \"kubernetes.io/projected/c82f218c-6788-4558-b8b0-a375592d9377-kube-api-access-tlc2z\") pod \"swift-operator-controller-manager-6859f9b676-bqhzt\" (UID: \"c82f218c-6788-4558-b8b0-a375592d9377\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.908555 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5"] Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.910302 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.916616 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.935519 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlc2z\" (UniqueName: \"kubernetes.io/projected/c82f218c-6788-4558-b8b0-a375592d9377-kube-api-access-tlc2z\") pod \"swift-operator-controller-manager-6859f9b676-bqhzt\" (UID: \"c82f218c-6788-4558-b8b0-a375592d9377\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.949002 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hkm5g" Oct 02 21:39:20 crc kubenswrapper[4636]: I1002 21:39:20.989487 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.002214 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/994cd5bb-107d-4426-9549-f5805479b1d7-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.002307 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6tf2\" (UniqueName: \"kubernetes.io/projected/e439d19a-9b7e-4245-baa8-1fb4a6c1f56a-kube-api-access-l6tf2\") pod \"telemetry-operator-controller-manager-5db5cf686f-j44cm\" (UID: \"e439d19a-9b7e-4245-baa8-1fb4a6c1f56a\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.011086 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.037540 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6tf2\" (UniqueName: \"kubernetes.io/projected/e439d19a-9b7e-4245-baa8-1fb4a6c1f56a-kube-api-access-l6tf2\") pod \"telemetry-operator-controller-manager-5db5cf686f-j44cm\" (UID: \"e439d19a-9b7e-4245-baa8-1fb4a6c1f56a\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.038615 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/994cd5bb-107d-4426-9549-f5805479b1d7-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-7vrb4\" (UID: \"994cd5bb-107d-4426-9549-f5805479b1d7\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.043121 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.043602 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.058916 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.074698 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-x4285" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.083194 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.103802 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.114139 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fkv4\" (UniqueName: \"kubernetes.io/projected/31a7600e-116a-458a-9102-0e4207fcc73c-kube-api-access-6fkv4\") pod \"test-operator-controller-manager-5cd5cb47d7-g9jx5\" (UID: \"31a7600e-116a-458a-9102-0e4207fcc73c\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.114492 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.123629 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.148114 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.174647 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.174776 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.178607 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-cm8lb" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.178804 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.182547 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.183431 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.186305 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-pzsvv" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.189643 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.203044 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz"] Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.216466 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8z28\" (UniqueName: \"kubernetes.io/projected/0d6cbc2a-0f5b-439d-963e-936e0a6a843d-kube-api-access-k8z28\") pod \"watcher-operator-controller-manager-fcd7d9895-pdhqs\" (UID: \"0d6cbc2a-0f5b-439d-963e-936e0a6a843d\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.216561 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fkv4\" (UniqueName: \"kubernetes.io/projected/31a7600e-116a-458a-9102-0e4207fcc73c-kube-api-access-6fkv4\") pod \"test-operator-controller-manager-5cd5cb47d7-g9jx5\" (UID: \"31a7600e-116a-458a-9102-0e4207fcc73c\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.311843 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fkv4\" (UniqueName: \"kubernetes.io/projected/31a7600e-116a-458a-9102-0e4207fcc73c-kube-api-access-6fkv4\") pod \"test-operator-controller-manager-5cd5cb47d7-g9jx5\" (UID: \"31a7600e-116a-458a-9102-0e4207fcc73c\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 
21:39:21.317421 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnqs9\" (UniqueName: \"kubernetes.io/projected/ccf677d0-a6b1-44ad-9d02-70fc42842c3a-kube-api-access-rnqs9\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg\" (UID: \"ccf677d0-a6b1-44ad-9d02-70fc42842c3a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.317477 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e01c24-5096-468f-8a34-36db5383bf10-cert\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.317505 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n2xv\" (UniqueName: \"kubernetes.io/projected/72e01c24-5096-468f-8a34-36db5383bf10-kube-api-access-7n2xv\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.317546 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8281915-fca8-4081-9240-78d83b1fb453-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.317578 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8z28\" (UniqueName: \"kubernetes.io/projected/0d6cbc2a-0f5b-439d-963e-936e0a6a843d-kube-api-access-k8z28\") pod \"watcher-operator-controller-manager-fcd7d9895-pdhqs\" (UID: \"0d6cbc2a-0f5b-439d-963e-936e0a6a843d\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.334711 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8281915-fca8-4081-9240-78d83b1fb453-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9\" (UID: \"b8281915-fca8-4081-9240-78d83b1fb453\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.365872 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8z28\" (UniqueName: \"kubernetes.io/projected/0d6cbc2a-0f5b-439d-963e-936e0a6a843d-kube-api-access-k8z28\") pod \"watcher-operator-controller-manager-fcd7d9895-pdhqs\" (UID: \"0d6cbc2a-0f5b-439d-963e-936e0a6a843d\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.403027 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.418436 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n2xv\" (UniqueName: \"kubernetes.io/projected/72e01c24-5096-468f-8a34-36db5383bf10-kube-api-access-7n2xv\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.418749 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnqs9\" (UniqueName: \"kubernetes.io/projected/ccf677d0-a6b1-44ad-9d02-70fc42842c3a-kube-api-access-rnqs9\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg\" (UID: \"ccf677d0-a6b1-44ad-9d02-70fc42842c3a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg" Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.418852 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e01c24-5096-468f-8a34-36db5383bf10-cert\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:21 crc kubenswrapper[4636]: E1002 21:39:21.419048 4636 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 02 21:39:21 crc kubenswrapper[4636]: E1002 21:39:21.419150 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e01c24-5096-468f-8a34-36db5383bf10-cert podName:72e01c24-5096-468f-8a34-36db5383bf10 nodeName:}" failed. No retries permitted until 2025-10-02 21:39:21.919130804 +0000 UTC m=+953.242138823 (durationBeforeRetry 500ms). 
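The two E1002 entries just above show how the kubelet parks a mount that cannot complete yet: the secret webhook-server-cert does not exist when the cert volume for openstack-operator-controller-manager-8598b6c48f-b7l8t is first set up, so the operation is refused with a retry deadline ("No retries permitted until ... durationBeforeRetry 500ms"), and the retried mount a few entries below (21:39:21.942659 then 21:39:21.979334) succeeds once the secret exists. A minimal sketch of that doubling-backoff pattern, assuming constants like the kubelet's exponential-backoff helper (500ms initial, capped around two minutes; the exact cap is an assumption here, not taken from this log):

package main

import (
	"fmt"
	"time"
)

// Assumed to mirror the kubelet's backoff for failed volume operations:
// start at 500ms and double after every failure, up to a cap.
const (
	initialDurationBeforeRetry = 500 * time.Millisecond
	maxDurationBeforeRetry     = 2*time.Minute + 2*time.Second // assumption
)

// backoff remembers how long the next retry must wait, producing the
// "No retries permitted until ... (durationBeforeRetry ...)" behavior.
type backoff struct {
	durationBeforeRetry time.Duration
}

// update doubles the wait after each new failure, up to the cap.
func (b *backoff) update() {
	if b.durationBeforeRetry == 0 {
		b.durationBeforeRetry = initialDurationBeforeRetry
		return
	}
	b.durationBeforeRetry *= 2
	if b.durationBeforeRetry > maxDurationBeforeRetry {
		b.durationBeforeRetry = maxDurationBeforeRetry
	}
}

func main() {
	var b backoff
	now := time.Now()
	for i := 1; i <= 5; i++ {
		b.update()
		fmt.Printf("failure %d: no retries permitted until %s (durationBeforeRetry %s)\n",
			i, now.Add(b.durationBeforeRetry).Format("15:04:05.000"), b.durationBeforeRetry)
	}
}

Because the first retry is due only 500ms after the failure, a secret that materializes in the meantime is picked up on the very next pass, which matches the roughly half-second gap between the failure at 21:39:21.419150 and the successful SetUp below.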
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.445384 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg"]
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.452342 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n2xv\" (UniqueName: \"kubernetes.io/projected/72e01c24-5096-468f-8a34-36db5383bf10-kube-api-access-7n2xv\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.460745 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnqs9\" (UniqueName: \"kubernetes.io/projected/ccf677d0-a6b1-44ad-9d02-70fc42842c3a-kube-api-access-rnqs9\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg\" (UID: \"ccf677d0-a6b1-44ad-9d02-70fc42842c3a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg"
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.495426 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h"]
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.530233 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9"
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.557817 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5"
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.658283 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg"
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.744496 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st"]
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.753547 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7"]
Oct 02 21:39:21 crc kubenswrapper[4636]: W1002 21:39:21.870916 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod405ad6d4_7f81_48b8_a8a9_ae1d8062f078.slice/crio-83855ec46de22c3a14cf2d8f2006cbd5996f42e2e59fabeee48fcc35eb6b8ac5 WatchSource:0}: Error finding container 83855ec46de22c3a14cf2d8f2006cbd5996f42e2e59fabeee48fcc35eb6b8ac5: Status 404 returned error can't find the container with id 83855ec46de22c3a14cf2d8f2006cbd5996f42e2e59fabeee48fcc35eb6b8ac5
Oct 02 21:39:21 crc kubenswrapper[4636]: W1002 21:39:21.877939 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f1e85fe_02dd_423c_991d_fafb30119601.slice/crio-fcc32758020978bae47057616af5b1019e77aefd7504eb3c49cd3eebceea8247 WatchSource:0}: Error finding container fcc32758020978bae47057616af5b1019e77aefd7504eb3c49cd3eebceea8247: Status 404 returned error can't find the container with id fcc32758020978bae47057616af5b1019e77aefd7504eb3c49cd3eebceea8247
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.942659 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e01c24-5096-468f-8a34-36db5383bf10-cert\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"
Oct 02 21:39:21 crc kubenswrapper[4636]: I1002 21:39:21.979334 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72e01c24-5096-468f-8a34-36db5383bf10-cert\") pod \"openstack-operator-controller-manager-8598b6c48f-b7l8t\" (UID: \"72e01c24-5096-468f-8a34-36db5383bf10\") " pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.115749 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" event={"ID":"26738e55-423c-4cab-821d-b9abb0d8a026","Type":"ContainerStarted","Data":"677387fbaac6d5fa4f860e074d376d26e8843813d07bb072cdeeaf601309cb7a"}
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.120412 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" event={"ID":"330ebc0e-6515-4a61-9d41-43f11247c659","Type":"ContainerStarted","Data":"4bbd72e1b5a0945d81c374904bf00ae30969827a4c0bb32d448948b282ba7981"}
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.127558 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" event={"ID":"b29f844c-ab64-4edb-9dee-0d19e4cb78d0","Type":"ContainerStarted","Data":"6abc29f68757ef7e37dc81711042d844e9497bb0166df2dbfc7567aca14a4b98"}
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.128483 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" event={"ID":"1f1e85fe-02dd-423c-991d-fafb30119601","Type":"ContainerStarted","Data":"fcc32758020978bae47057616af5b1019e77aefd7504eb3c49cd3eebceea8247"}
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.133007 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" event={"ID":"405ad6d4-7f81-48b8-a8a9-ae1d8062f078","Type":"ContainerStarted","Data":"83855ec46de22c3a14cf2d8f2006cbd5996f42e2e59fabeee48fcc35eb6b8ac5"}
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.208585 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.238617 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-7ntw8"]
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.274133 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v"]
Oct 02 21:39:22 crc kubenswrapper[4636]: W1002 21:39:22.287110 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28a9b36b_5645_4d73_883a_87028af9455f.slice/crio-d4e5a3332b9b8bc2632038d54628184010684f4bbf2633d978de9ee1c8aa6000 WatchSource:0}: Error finding container d4e5a3332b9b8bc2632038d54628184010684f4bbf2633d978de9ee1c8aa6000: Status 404 returned error can't find the container with id d4e5a3332b9b8bc2632038d54628184010684f4bbf2633d978de9ee1c8aa6000
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.493720 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn"]
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.547804 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj"]
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.607504 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm"]
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.638443 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9"]
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.667369 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb"]
Oct 02 21:39:22 crc kubenswrapper[4636]: I1002 21:39:22.900415 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4"]
Oct 02 21:39:22 crc kubenswrapper[4636]: W1002 21:39:22.937514 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod994cd5bb_107d_4426_9549_f5805479b1d7.slice/crio-840e9395758436add3df79b04d98cba01b234dae626e11ceb8b24bbfaa25e5b6 WatchSource:0}: Error finding container 840e9395758436add3df79b04d98cba01b234dae626e11ceb8b24bbfaa25e5b6: Status 404 returned error can't find the container with id 840e9395758436add3df79b04d98cba01b234dae626e11ceb8b24bbfaa25e5b6
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:22.997899 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.004532 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.018074 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.117719 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.117778 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.117833 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.118367 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9f0c7c91411fb3c1501fae8b5053b828ebe95e83f4048c87988a5b7f03a27fd8"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.118408 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://9f0c7c91411fb3c1501fae8b5053b828ebe95e83f4048c87988a5b7f03a27fd8" gracePeriod=600
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.122691 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.126008 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.138827 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.150582 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.161469 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds"]
Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.173731 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz"]
Oct 02 21:39:23 crc kubenswrapper[4636]:
W1002 21:39:23.173961 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20dffa00_5117_4d3e_8b67_467357444816.slice/crio-69a1579c069bd230f5a33bc7bb008071b3eb92b9d0ecb3cbec1ab2c6da094cf9 WatchSource:0}: Error finding container 69a1579c069bd230f5a33bc7bb008071b3eb92b9d0ecb3cbec1ab2c6da094cf9: Status 404 returned error can't find the container with id 69a1579c069bd230f5a33bc7bb008071b3eb92b9d0ecb3cbec1ab2c6da094cf9 Oct 02 21:39:23 crc kubenswrapper[4636]: E1002 21:39:23.204364 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vmbg4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-7d8bb7f44c-j7dds_openstack-operators(8be5d41d-4585-496e-b2a1-1f6514902e5a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.204481 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" event={"ID":"0d6cbc2a-0f5b-439d-963e-936e0a6a843d","Type":"ContainerStarted","Data":"c27564a9320a2dc1e1682b3f2f2ebeab5e51b13d1ded5c92c5a653fa602673d9"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.205413 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t"] Oct 02 
21:39:23 crc kubenswrapper[4636]: E1002 21:39:23.211193 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:b7409dcf05c85eab205904d29d4276f8e927c772eba6363ecfa21ab10c4aaa01,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mcddx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-59d6cfdf45-spqgz_openstack-operators(8bf57ebc-7603-43a7-8bad-f52c3ad98ecf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 02 21:39:23 crc kubenswrapper[4636]: W1002 21:39:23.212148 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc82f218c_6788_4558_b8b0_a375592d9377.slice/crio-b090a6959d950b332dc8e060228655f6025ccd64e6a4a1715532ac71b91bf7d2 WatchSource:0}: Error finding container b090a6959d950b332dc8e060228655f6025ccd64e6a4a1715532ac71b91bf7d2: Status 404 returned error can't find the container with id b090a6959d950b332dc8e060228655f6025ccd64e6a4a1715532ac71b91bf7d2 Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.214411 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" event={"ID":"a7b69bb7-2b74-4724-b459-215b7d515840","Type":"ContainerStarted","Data":"7e9bba22b0b2fc1ad22209b9dd044039c4ec471592a190b28492e8f999e88efb"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.246978 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" event={"ID":"a6a3569c-020b-4cf3-8895-8f31e98bae75","Type":"ContainerStarted","Data":"e6c61da747511f1596913708823478773d784110ee180de32e4360bf1330a672"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.258996 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" event={"ID":"b8281915-fca8-4081-9240-78d83b1fb453","Type":"ContainerStarted","Data":"3483a92eb343cd5243da650d9616e65e452e094bde07d8d9872f3e5f6401136a"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.263164 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" event={"ID":"28a9b36b-5645-4d73-883a-87028af9455f","Type":"ContainerStarted","Data":"d4e5a3332b9b8bc2632038d54628184010684f4bbf2633d978de9ee1c8aa6000"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.274243 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" event={"ID":"46307289-bc17-4194-8ffa-e0cccfbb675c","Type":"ContainerStarted","Data":"fa60cadb4f03e1ffe3fde3fa8dfd04566936174f6e3d07d08eaeaa7b5040489b"} Oct 02 21:39:23 crc kubenswrapper[4636]: E1002 21:39:23.277957 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tlc2z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-bqhzt_openstack-operators(c82f218c-6788-4558-b8b0-a375592d9377): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.278227 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" event={"ID":"e439d19a-9b7e-4245-baa8-1fb4a6c1f56a","Type":"ContainerStarted","Data":"7eed958fde6df7152a5a2d83382e4de360536867aec2fc5119e0600a381f132f"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.296052 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" event={"ID":"a10a56aa-5ebe-4f98-8f99-fcba515c254d","Type":"ContainerStarted","Data":"0dcad32b087eb5226dac538a11050ab9279261d5faa3fc803940c232b8b6b176"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.298257 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" event={"ID":"a50690d6-0e31-4944-9011-35076543304f","Type":"ContainerStarted","Data":"e65ecb60a3891e30c1d1cb8bf3942bec92242614bd03ebd9e4232775f514b4c4"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.300094 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" event={"ID":"a0b38373-c0c9-4fd9-ad1b-d7af382f370b","Type":"ContainerStarted","Data":"222df25aa11e02bf97d47612033772b4b8513ffd507a9a6dc92e49015d214a8d"} Oct 02 21:39:23 crc kubenswrapper[4636]: I1002 21:39:23.303511 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" event={"ID":"994cd5bb-107d-4426-9549-f5805479b1d7","Type":"ContainerStarted","Data":"840e9395758436add3df79b04d98cba01b234dae626e11ceb8b24bbfaa25e5b6"} Oct 02 21:39:23 crc kubenswrapper[4636]: E1002 21:39:23.538085 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" podUID="8be5d41d-4585-496e-b2a1-1f6514902e5a" Oct 02 21:39:23 crc kubenswrapper[4636]: E1002 21:39:23.572264 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" podUID="8bf57ebc-7603-43a7-8bad-f52c3ad98ecf" Oct 02 21:39:23 crc kubenswrapper[4636]: E1002 21:39:23.622321 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" podUID="c82f218c-6788-4558-b8b0-a375592d9377" Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.341340 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" event={"ID":"8bf57ebc-7603-43a7-8bad-f52c3ad98ecf","Type":"ContainerStarted","Data":"39ff23225abdd0331754131d9026f7f1045d7ac7bde20f05fd6f5a69fd90864d"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.341611 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" event={"ID":"8bf57ebc-7603-43a7-8bad-f52c3ad98ecf","Type":"ContainerStarted","Data":"8e427f2fa73a6683437f81a305f27b55dbfedbc9fef3bfdc0acba278d0760757"} Oct 02 21:39:24 crc kubenswrapper[4636]: E1002 21:39:24.343722 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:b7409dcf05c85eab205904d29d4276f8e927c772eba6363ecfa21ab10c4aaa01\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" podUID="8bf57ebc-7603-43a7-8bad-f52c3ad98ecf" Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.344449 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" event={"ID":"31a7600e-116a-458a-9102-0e4207fcc73c","Type":"ContainerStarted","Data":"e8c8859e744fb5a8527cbe62805ae6530851bab4a5705494fb15f2657bd22b04"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.363043 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" event={"ID":"72e01c24-5096-468f-8a34-36db5383bf10","Type":"ContainerStarted","Data":"21050972399e6171313655fdcb5366dfbb0ef1d32d321713a60c4cc030c11932"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.363086 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" event={"ID":"72e01c24-5096-468f-8a34-36db5383bf10","Type":"ContainerStarted","Data":"e6f74a4212c50eff9b3cc351d32e8c4a8e007dd39c3b32a34c58c415d60dc565"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.363097 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" event={"ID":"72e01c24-5096-468f-8a34-36db5383bf10","Type":"ContainerStarted","Data":"e1c9b70c76a4737f08f7e6bb6be6582beef6c7fe036f7355c044e4236b5653e9"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.363995 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.376955 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" event={"ID":"8be5d41d-4585-496e-b2a1-1f6514902e5a","Type":"ContainerStarted","Data":"1e162aa536b1532a93cfc95736dd4e393458b9a90334e6ad5b618f05eeb4d0e1"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.376997 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" 
event={"ID":"8be5d41d-4585-496e-b2a1-1f6514902e5a","Type":"ContainerStarted","Data":"7e1cc28b3487ce5960d18be95f5da02f36d625bbd8fcfa541d596025a79c9682"} Oct 02 21:39:24 crc kubenswrapper[4636]: E1002 21:39:24.378419 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" podUID="8be5d41d-4585-496e-b2a1-1f6514902e5a" Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.379090 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg" event={"ID":"ccf677d0-a6b1-44ad-9d02-70fc42842c3a","Type":"ContainerStarted","Data":"7c7c4523174e67f61a3acf0e3a0b6a52e69abd3b4765abd8015cd5450fc91b02"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.385577 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" event={"ID":"20dffa00-5117-4d3e-8b67-467357444816","Type":"ContainerStarted","Data":"69a1579c069bd230f5a33bc7bb008071b3eb92b9d0ecb3cbec1ab2c6da094cf9"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.386731 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" event={"ID":"c82f218c-6788-4558-b8b0-a375592d9377","Type":"ContainerStarted","Data":"d66b6b0b1e7c13965b15cfb039505559db8c95e118ee075397c7b32c0be86fb2"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.386767 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" event={"ID":"c82f218c-6788-4558-b8b0-a375592d9377","Type":"ContainerStarted","Data":"b090a6959d950b332dc8e060228655f6025ccd64e6a4a1715532ac71b91bf7d2"} Oct 02 21:39:24 crc kubenswrapper[4636]: E1002 21:39:24.389600 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" podUID="c82f218c-6788-4558-b8b0-a375592d9377" Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.394895 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="9f0c7c91411fb3c1501fae8b5053b828ebe95e83f4048c87988a5b7f03a27fd8" exitCode=0 Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.394951 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"9f0c7c91411fb3c1501fae8b5053b828ebe95e83f4048c87988a5b7f03a27fd8"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.394975 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"04eab656ee8c7baf8b67922349c9645220b7ee9b1e0b335d95acde2eb540ab5f"} Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.394991 4636 scope.go:117] "RemoveContainer" 
containerID="ab44b8ce55b9d49cf042f8f75e697442d8007223e48a516b488f1007f0a6409a" Oct 02 21:39:24 crc kubenswrapper[4636]: I1002 21:39:24.412240 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" podStartSLOduration=4.412224351 podStartE2EDuration="4.412224351s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:39:24.405593201 +0000 UTC m=+955.728601220" watchObservedRunningTime="2025-10-02 21:39:24.412224351 +0000 UTC m=+955.735232370" Oct 02 21:39:25 crc kubenswrapper[4636]: E1002 21:39:25.495436 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" podUID="c82f218c-6788-4558-b8b0-a375592d9377" Oct 02 21:39:25 crc kubenswrapper[4636]: E1002 21:39:25.498867 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:b7409dcf05c85eab205904d29d4276f8e927c772eba6363ecfa21ab10c4aaa01\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" podUID="8bf57ebc-7603-43a7-8bad-f52c3ad98ecf" Oct 02 21:39:25 crc kubenswrapper[4636]: E1002 21:39:25.526385 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" podUID="8be5d41d-4585-496e-b2a1-1f6514902e5a" Oct 02 21:39:32 crc kubenswrapper[4636]: I1002 21:39:32.215698 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-8598b6c48f-b7l8t" Oct 02 21:39:37 crc kubenswrapper[4636]: E1002 21:39:37.471720 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6" Oct 02 21:39:37 crc kubenswrapper[4636]: E1002 21:39:37.472444 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k8z28,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-fcd7d9895-pdhqs_openstack-operators(0d6cbc2a-0f5b-439d-963e-936e0a6a843d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:39:40 crc kubenswrapper[4636]: E1002 21:39:40.125223 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:585796b996a5b6d7ad68f0cb420bf4f2ee38c9f16f194e3111c162ce91ea8a7b" Oct 02 21:39:40 crc kubenswrapper[4636]: E1002 21:39:40.125651 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:585796b996a5b6d7ad68f0cb420bf4f2ee38c9f16f194e3111c162ce91ea8a7b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lwpd5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-75dfd9b554-r4gzg_openstack-operators(330ebc0e-6515-4a61-9d41-43f11247c659): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:39:40 crc kubenswrapper[4636]: E1002 21:39:40.588169 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:8fdb7ea8542adb2eca73f11bd78e6aebceed2ba7a1e9fdd149c75e0049d09ce0" Oct 02 21:39:40 crc kubenswrapper[4636]: E1002 21:39:40.588400 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:8fdb7ea8542adb2eca73f11bd78e6aebceed2ba7a1e9fdd149c75e0049d09ce0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zkmfh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6fd6854b49-h8kh7_openstack-operators(1f1e85fe-02dd-423c-991d-fafb30119601): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:42 crc kubenswrapper[4636]: E1002 21:39:42.364071 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:9fed055cd1f09627ef351e61c7e42227570193ccd5d33167a607c49b442a9d87"
Oct 02 21:39:42 crc kubenswrapper[4636]: E1002 21:39:42.364503 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:9fed055cd1f09627ef351e61c7e42227570193ccd5d33167a607c49b442a9d87,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jhkdv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-846dff85b5-lc2st_openstack-operators(405ad6d4-7f81-48b8-a8a9-ae1d8062f078): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:42 crc kubenswrapper[4636]: E1002 21:39:42.914529 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b"
Oct 02 21:39:42 crc kubenswrapper[4636]: E1002 21:39:42.914693 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kw26w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-79d68d6c85-l889h_openstack-operators(26738e55-423c-4cab-821d-b9abb0d8a026): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:44 crc kubenswrapper[4636]: E1002 21:39:44.347291 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb"
Oct 02 21:39:44 crc kubenswrapper[4636]: E1002 21:39:44.347966 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6fkv4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd5cb47d7-g9jx5_openstack-operators(31a7600e-116a-458a-9102-0e4207fcc73c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:45 crc kubenswrapper[4636]: E1002 21:39:45.682679 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:a82409e6d6a5554aad95acfe6fa4784e33de19a963eb8b1da1a80a3e6cf1ab55"
Oct 02 21:39:45 crc kubenswrapper[4636]: E1002 21:39:45.682888 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:a82409e6d6a5554aad95acfe6fa4784e33de19a963eb8b1da1a80a3e6cf1ab55,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p5f4g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-555c7456bd-z4lx2_openstack-operators(20dffa00-5117-4d3e-8b67-467357444816): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:46 crc kubenswrapper[4636]: E1002 21:39:46.158439 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475"
Oct 02 21:39:46 crc kubenswrapper[4636]: E1002 21:39:46.158628 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-srfzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-5fbf469cd7-7vrb4_openstack-operators(994cd5bb-107d-4426-9549-f5805479b1d7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:46 crc kubenswrapper[4636]: E1002 21:39:46.632274 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:516f76ed86dd34225e6d0309451c7886bb81ff69032ba28125ae4d0cec54bce7"
Oct 02 21:39:46 crc kubenswrapper[4636]: E1002 21:39:46.632662 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:516f76ed86dd34225e6d0309451c7886bb81ff69032ba28125ae4d0cec54bce7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mxffp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7f55849f88-ccnnb_openstack-operators(a6a3569c-020b-4cf3-8895-8f31e98bae75): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:48 crc kubenswrapper[4636]: E1002 21:39:48.769109 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95"
Oct 02 21:39:48 crc kubenswrapper[4636]: E1002 21:39:48.769333 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dxn7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-6769b867d9-d74cn_openstack-operators(a10a56aa-5ebe-4f98-8f99-fcba515c254d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:49 crc kubenswrapper[4636]: E1002 21:39:49.434120 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12"
Oct 02 21:39:49 crc kubenswrapper[4636]: E1002 21:39:49.434355 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2jhjh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-84bc9db6cc-dpx7v_openstack-operators(a7b69bb7-2b74-4724-b459-215b7d515840): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.442116 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" podUID="1f1e85fe-02dd-423c-991d-fafb30119601"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.451585 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" podUID="a10a56aa-5ebe-4f98-8f99-fcba515c254d"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.494827 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" podUID="a6a3569c-020b-4cf3-8895-8f31e98bae75"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.525692 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" podUID="994cd5bb-107d-4426-9549-f5805479b1d7"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.545602 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" podUID="405ad6d4-7f81-48b8-a8a9-ae1d8062f078"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.584729 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" podUID="20dffa00-5117-4d3e-8b67-467357444816"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.585454 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" podUID="a7b69bb7-2b74-4724-b459-215b7d515840"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.593220 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" podUID="330ebc0e-6515-4a61-9d41-43f11247c659"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.610133 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" podUID="26738e55-423c-4cab-821d-b9abb0d8a026"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.673486 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" podUID="0d6cbc2a-0f5b-439d-963e-936e0a6a843d"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.703820 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" event={"ID":"405ad6d4-7f81-48b8-a8a9-ae1d8062f078","Type":"ContainerStarted","Data":"6c86c045dae14f8ce0bb074bfbc68cfe37aa50d7cf5c8550566f88d573f08ba9"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.713427 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:9fed055cd1f09627ef351e61c7e42227570193ccd5d33167a607c49b442a9d87\\\"\"" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" podUID="405ad6d4-7f81-48b8-a8a9-ae1d8062f078"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.715339 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" event={"ID":"8bf57ebc-7603-43a7-8bad-f52c3ad98ecf","Type":"ContainerStarted","Data":"92c1a589d9bef4d5405c52926ba26062822f3a74707f470389cbc90067eacc82"}
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.716125 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.721455 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" event={"ID":"330ebc0e-6515-4a61-9d41-43f11247c659","Type":"ContainerStarted","Data":"c1758ff90a7199b54b0600a4e03ee54cf8e1985d74383c44b08003ea04da7d6d"}
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.723805 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" event={"ID":"a6a3569c-020b-4cf3-8895-8f31e98bae75","Type":"ContainerStarted","Data":"8c25f50468a0daefcf13bb1934ad82d69ce7ae50e31b858a976340ac8ee9f9c3"}
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.731994 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" event={"ID":"0d6cbc2a-0f5b-439d-963e-936e0a6a843d","Type":"ContainerStarted","Data":"d33907ca0490a6ffdaf97f79c0f24fd8e505e7583ab3d22bd10ede88bec843dd"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.736318 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:516f76ed86dd34225e6d0309451c7886bb81ff69032ba28125ae4d0cec54bce7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" podUID="a6a3569c-020b-4cf3-8895-8f31e98bae75"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.738934 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" event={"ID":"20dffa00-5117-4d3e-8b67-467357444816","Type":"ContainerStarted","Data":"132b15ae24a22d66101a2bfe01f4606febd11c25524fe7507216033a81ad6cab"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.741425 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:a82409e6d6a5554aad95acfe6fa4784e33de19a963eb8b1da1a80a3e6cf1ab55\\\"\"" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" podUID="20dffa00-5117-4d3e-8b67-467357444816"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.761026 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" event={"ID":"a7b69bb7-2b74-4724-b459-215b7d515840","Type":"ContainerStarted","Data":"0c61dbc2d6400751f98b46573a45b4d82776e674add3a57bb6f0e311482c04fb"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.765239 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" podUID="a7b69bb7-2b74-4724-b459-215b7d515840"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.786563 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" event={"ID":"b29f844c-ab64-4edb-9dee-0d19e4cb78d0","Type":"ContainerStarted","Data":"a0b864fdb2080974d88cef40312146544de0f5b35ed3e67c2a12ee3132d57c34"}
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.804554 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" event={"ID":"a10a56aa-5ebe-4f98-8f99-fcba515c254d","Type":"ContainerStarted","Data":"689e07f6ceb663e4c72ed182a0d7cf12ca5f42f190868e4d50a717729f468d13"}
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.816736 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" event={"ID":"1f1e85fe-02dd-423c-991d-fafb30119601","Type":"ContainerStarted","Data":"472bbf703b62edb6b26d0bcf074beb2328db1d6406814de226c48b32e11cd5fb"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.822200 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" podUID="a10a56aa-5ebe-4f98-8f99-fcba515c254d"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.842323 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" event={"ID":"26738e55-423c-4cab-821d-b9abb0d8a026","Type":"ContainerStarted","Data":"7c437a6c0ad0912b120b96f04eb6d5597b70a5b359e23f59a658663b80ea95c3"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.860261 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" podUID="26738e55-423c-4cab-821d-b9abb0d8a026"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.874067 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" event={"ID":"994cd5bb-107d-4426-9549-f5805479b1d7","Type":"ContainerStarted","Data":"6cb2b3b9945050a289c43e6810169e3cad7e7b7dbfda6a2f812567400f2a3ab4"}
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.888588 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" podUID="31a7600e-116a-458a-9102-0e4207fcc73c"
Oct 02 21:39:51 crc kubenswrapper[4636]: I1002 21:39:51.888642 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz" podStartSLOduration=4.031437235 podStartE2EDuration="31.888631262s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.211075996 +0000 UTC m=+954.534084015" lastFinishedPulling="2025-10-02 21:39:51.068269993 +0000 UTC m=+982.391278042" observedRunningTime="2025-10-02 21:39:51.887802439 +0000 UTC m=+983.210810458" watchObservedRunningTime="2025-10-02 21:39:51.888631262 +0000 UTC m=+983.211639281"
Oct 02 21:39:51 crc kubenswrapper[4636]: E1002 21:39:51.915994 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475\\\"\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" podUID="994cd5bb-107d-4426-9549-f5805479b1d7"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.897622 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" event={"ID":"b8281915-fca8-4081-9240-78d83b1fb453","Type":"ContainerStarted","Data":"06dfe5cbf8c05f46c623dff613b8239cb1cc4973588ead495ea3b609b3ad2a8f"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.897998 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" event={"ID":"b8281915-fca8-4081-9240-78d83b1fb453","Type":"ContainerStarted","Data":"92344a69d1196825932bb0e5b818200daefe19916686de63353935c1eaa9dea7"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.898047 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.913996 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" event={"ID":"31a7600e-116a-458a-9102-0e4207fcc73c","Type":"ContainerStarted","Data":"d4bf367d56667ebff50bfff4b125e2db2f60aa9c623996f615a74a8e50bd12f0"}
Oct 02 21:39:52 crc kubenswrapper[4636]: E1002 21:39:52.918988 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" podUID="31a7600e-116a-458a-9102-0e4207fcc73c"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.925677 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" event={"ID":"46307289-bc17-4194-8ffa-e0cccfbb675c","Type":"ContainerStarted","Data":"0ec1dbf77a331a88728117288f174e876aacdc0b4ebbaea49b1d4c72bd8fc7dc"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.925721 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" event={"ID":"46307289-bc17-4194-8ffa-e0cccfbb675c","Type":"ContainerStarted","Data":"49be87af1d7119dc66e71497b64b808d8d05a08fd804b624905585789fb5f02e"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.926395 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.945509 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" event={"ID":"e439d19a-9b7e-4245-baa8-1fb4a6c1f56a","Type":"ContainerStarted","Data":"9a0829587cbc2783d3030c4e36b6717c56dc3505e741a37d0cb2075af42399f2"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.945550 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" event={"ID":"e439d19a-9b7e-4245-baa8-1fb4a6c1f56a","Type":"ContainerStarted","Data":"79044479f2041464b10c71fb405e512357734822446288d93e18a6b1e51103bf"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.946118 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.952972 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" event={"ID":"8be5d41d-4585-496e-b2a1-1f6514902e5a","Type":"ContainerStarted","Data":"51b70929f94fd14510c8de2f1b423332b8b4d1ef66c7f37681b544d03e7abf9c"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.953645 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.963479 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg" event={"ID":"ccf677d0-a6b1-44ad-9d02-70fc42842c3a","Type":"ContainerStarted","Data":"9a4f17160b3a03ba266a70deb4eff911b5eb6d0cd87dd51f5d0f5c5f53e84017"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.973908 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" event={"ID":"c82f218c-6788-4558-b8b0-a375592d9377","Type":"ContainerStarted","Data":"e98a1359ee56f308c08d5193cd178aaf855e3b6612c0191fc1e9dc80ac2e7c44"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.974736 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt"
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.992717 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" event={"ID":"b29f844c-ab64-4edb-9dee-0d19e4cb78d0","Type":"ContainerStarted","Data":"c0b702554c65ca219504aaa73d31c0a5f8e00938333c91b209d43bbbf701b07d"}
Oct 02 21:39:52 crc kubenswrapper[4636]: I1002 21:39:52.993897 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.024638 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" event={"ID":"28a9b36b-5645-4d73-883a-87028af9455f","Type":"ContainerStarted","Data":"92e67fd2d820318fc24e85ec687b589dea0f4fc6f6bcb86748c1582fc4674338"}
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.025402 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.025505 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" event={"ID":"28a9b36b-5645-4d73-883a-87028af9455f","Type":"ContainerStarted","Data":"85c5aff97b5b36c2916d64aa8ea732d010e13cde397d239b66073da35c384fbb"}
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.049649 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" event={"ID":"a50690d6-0e31-4944-9011-35076543304f","Type":"ContainerStarted","Data":"0a6816c7755f0bb1e5f1ae2d446d47a0747ff9ea8740d8d93d124ff4cdbbe0f3"}
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.049690 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" event={"ID":"a50690d6-0e31-4944-9011-35076543304f","Type":"ContainerStarted","Data":"baf6e95f040a547f169a2f02ca1f5a3d7cc110846a96bb767c107cfdb434a9f9"}
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.050765 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.061151 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" event={"ID":"a0b38373-c0c9-4fd9-ad1b-d7af382f370b","Type":"ContainerStarted","Data":"4268041aa34c6d5cc3263385e9b3091cb1b9f9a2a6707e835d3c2720134ad630"}
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.061191 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.061204 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" event={"ID":"a0b38373-c0c9-4fd9-ad1b-d7af382f370b","Type":"ContainerStarted","Data":"50a09269c88ac20a0232dfaf895d3a6d77b48e133dd75d3f4cf3db1c97ef8364"}
Oct 02 21:39:53 crc kubenswrapper[4636]: E1002 21:39:53.062521 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:a82409e6d6a5554aad95acfe6fa4784e33de19a963eb8b1da1a80a3e6cf1ab55\\\"\"" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" podUID="20dffa00-5117-4d3e-8b67-467357444816"
Oct 02 21:39:53 crc kubenswrapper[4636]: E1002 21:39:53.063874 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:516f76ed86dd34225e6d0309451c7886bb81ff69032ba28125ae4d0cec54bce7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" podUID="a6a3569c-020b-4cf3-8895-8f31e98bae75"
Oct 02 21:39:53 crc kubenswrapper[4636]: E1002 21:39:53.066733 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:38abe6135ccaa369bc831f7878a6dfdf9a5a993a882e1c42073ca43582766f12\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" podUID="a7b69bb7-2b74-4724-b459-215b7d515840"
Oct 02 21:39:53 crc kubenswrapper[4636]: E1002 21:39:53.067076 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475\\\"\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" podUID="994cd5bb-107d-4426-9549-f5805479b1d7"
Oct 02 21:39:53 crc kubenswrapper[4636]: E1002 21:39:53.067107 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:e9ff0784bffe5b9a6d1a77a1b8866dd26b8d0c54465707df1808f68caad93a95\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" podUID="a10a56aa-5ebe-4f98-8f99-fcba515c254d"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.091048 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9" podStartSLOduration=5.308517994 podStartE2EDuration="33.091032606s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.092696308 +0000 UTC m=+954.415704327" lastFinishedPulling="2025-10-02 21:39:50.87521091 +0000 UTC m=+982.198218939" observedRunningTime="2025-10-02 21:39:53.066786833 +0000 UTC m=+984.389794862" watchObservedRunningTime="2025-10-02 21:39:53.091032606 +0000 UTC m=+984.414040615"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.256032 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt" podStartSLOduration=5.419582988 podStartE2EDuration="33.256016702s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.277102439 +0000 UTC m=+954.600110458" lastFinishedPulling="2025-10-02 21:39:51.113536133 +0000 UTC m=+982.436544172" observedRunningTime="2025-10-02 21:39:53.202370376 +0000 UTC m=+984.525378395" watchObservedRunningTime="2025-10-02 21:39:53.256016702 +0000 UTC m=+984.579024721"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.373484 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj" podStartSLOduration=5.549949759 podStartE2EDuration="33.373455657s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.587108132 +0000 UTC m=+953.910116141" lastFinishedPulling="2025-10-02 21:39:50.41061402 +0000 UTC m=+981.733622039" observedRunningTime="2025-10-02 21:39:53.371047222 +0000 UTC m=+984.694055231" watchObservedRunningTime="2025-10-02 21:39:53.373455657 +0000 UTC m=+984.696463676"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.537888 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n" podStartSLOduration=6.673823266 podStartE2EDuration="33.537872018s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.076879581 +0000 UTC m=+954.399887600" lastFinishedPulling="2025-10-02 21:39:49.940928313 +0000 UTC m=+981.263936352" observedRunningTime="2025-10-02 21:39:53.406244211 +0000 UTC m=+984.729252230" watchObservedRunningTime="2025-10-02 21:39:53.537872018 +0000 UTC m=+984.860880037"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.556413 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg" podStartSLOduration=4.885213482 podStartE2EDuration="32.556397187s" podCreationTimestamp="2025-10-02 21:39:21 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.204034316 +0000 UTC m=+954.527042335" lastFinishedPulling="2025-10-02 21:39:50.875218021 +0000 UTC m=+982.198226040" observedRunningTime="2025-10-02 21:39:53.552293397 +0000 UTC m=+984.875301406" watchObservedRunningTime="2025-10-02 21:39:53.556397187 +0000 UTC m=+984.879405206"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.653589 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm" podStartSLOduration=5.418343046 podStartE2EDuration="33.653573346s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.640911465 +0000 UTC m=+953.963919484" lastFinishedPulling="2025-10-02 21:39:50.876141775 +0000 UTC m=+982.199149784" observedRunningTime="2025-10-02 21:39:53.651395467 +0000 UTC m=+984.974403486" watchObservedRunningTime="2025-10-02 21:39:53.653573346 +0000 UTC m=+984.976581365"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.779071 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds" podStartSLOduration=5.869719365 podStartE2EDuration="33.779056108s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.204252842 +0000 UTC m=+954.527260861" lastFinishedPulling="2025-10-02 21:39:51.113589595 +0000 UTC m=+982.436597604" observedRunningTime="2025-10-02 21:39:53.777893316 +0000 UTC m=+985.100901335" watchObservedRunningTime="2025-10-02 21:39:53.779056108 +0000 UTC m=+985.102064127"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.818529 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8" podStartSLOduration=5.200175675 podStartE2EDuration="33.818511391s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.288742293 +0000 UTC m=+953.611750312" lastFinishedPulling="2025-10-02 21:39:50.907077989 +0000 UTC m=+982.230086028" observedRunningTime="2025-10-02 21:39:53.814337199 +0000 UTC m=+985.137345218" watchObservedRunningTime="2025-10-02 21:39:53.818511391 +0000 UTC m=+985.141519410"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.854267 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9" podStartSLOduration=5.719256231 podStartE2EDuration="33.854251414s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.739263842 +0000 UTC m=+954.062271861" lastFinishedPulling="2025-10-02 21:39:50.874259025 +0000 UTC m=+982.197267044" observedRunningTime="2025-10-02 21:39:53.850428401 +0000 UTC m=+985.173436420" watchObservedRunningTime="2025-10-02 21:39:53.854251414 +0000 UTC m=+985.177259433"
Oct 02 21:39:53 crc kubenswrapper[4636]: I1002 21:39:53.933155 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz" podStartSLOduration=5.878894559 podStartE2EDuration="34.9331394s" podCreationTimestamp="2025-10-02 21:39:19 +0000 UTC" firstStartedPulling="2025-10-02 21:39:21.356555584 +0000 UTC m=+952.679563613" lastFinishedPulling="2025-10-02 21:39:50.410800435 +0000 UTC m=+981.733808454" observedRunningTime="2025-10-02 21:39:53.887281395 +0000 UTC m=+985.210289414" watchObservedRunningTime="2025-10-02 21:39:53.9331394 +0000 UTC m=+985.256147419"
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.068476 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" event={"ID":"0d6cbc2a-0f5b-439d-963e-936e0a6a843d","Type":"ContainerStarted","Data":"36ccdd070211babfea9afc6d6e299b8de57a1c69b4a77450cc1d64a4f48adca2"}
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.069241 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs"
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.071408 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" event={"ID":"330ebc0e-6515-4a61-9d41-43f11247c659","Type":"ContainerStarted","Data":"e4afef499b8193b65fcd67ebfbbb5d00fa89dd7470800bd18b1a7ceedb3ff8bb"}
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.071804 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg"
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.073578 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" event={"ID":"1f1e85fe-02dd-423c-991d-fafb30119601","Type":"ContainerStarted","Data":"9417c3df141e8c6cdc77234a7ec717858a7437c2f875494dcec8f93bb8e06ca4"}
Oct 02 21:39:54 crc kubenswrapper[4636]: E1002 21:39:54.078107 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" podUID="31a7600e-116a-458a-9102-0e4207fcc73c"
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.096334 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs" podStartSLOduration=4.613659445 podStartE2EDuration="34.096318637s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.061068744 +0000 UTC m=+954.384076763" lastFinishedPulling="2025-10-02 21:39:52.543727936 +0000 UTC m=+983.866735955" observedRunningTime="2025-10-02 21:39:54.092866974 +0000 UTC m=+985.415874993" watchObservedRunningTime="2025-10-02 21:39:54.096318637 +0000 UTC m=+985.419326646"
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.125112 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7" podStartSLOduration=3.318790678 podStartE2EDuration="34.125097833s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:21.918003048 +0000 UTC m=+953.241011067" lastFinishedPulling="2025-10-02 21:39:52.724310203 +0000 UTC m=+984.047318222" observedRunningTime="2025-10-02 21:39:54.120333414 +0000 UTC m=+985.443341433" watchObservedRunningTime="2025-10-02 21:39:54.125097833 +0000 UTC m=+985.448105852"
Oct 02 21:39:54 crc kubenswrapper[4636]: I1002 21:39:54.151153 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg" podStartSLOduration=3.405522225 podStartE2EDuration="34.151135594s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:21.760978387 +0000 UTC m=+953.083986406" lastFinishedPulling="2025-10-02 21:39:52.506591766 +0000 UTC m=+983.829599775" observedRunningTime="2025-10-02 21:39:54.14688619 +0000 UTC m=+985.469894209" watchObservedRunningTime="2025-10-02 21:39:54.151135594 +0000 UTC m=+985.474143613"
Oct 02 21:39:55 crc kubenswrapper[4636]: I1002 21:39:55.080286 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7"
Oct 02 21:39:56 crc kubenswrapper[4636]: I1002 21:39:56.096460 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" event={"ID":"26738e55-423c-4cab-821d-b9abb0d8a026","Type":"ContainerStarted","Data":"77230e82f02f7779e9f32a593b5148ed2f1aad533ab5fe44d886855b8120d8b9"}
Oct 02 21:39:56 crc kubenswrapper[4636]: I1002 21:39:56.096928 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h"
Oct 02 21:39:56 crc kubenswrapper[4636]: I1002 21:39:56.099038 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" event={"ID":"405ad6d4-7f81-48b8-a8a9-ae1d8062f078","Type":"ContainerStarted","Data":"5f66e4fc1d807e6ef4acbfae736becf44a8ce91d6812110364f120ca17fd8f91"}
Oct 02 21:39:56 crc kubenswrapper[4636]: I1002 21:39:56.099523 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st"
Oct 02 21:39:56 crc kubenswrapper[4636]: I1002 21:39:56.116137 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h" podStartSLOduration=3.455981553 podStartE2EDuration="37.116114839s" podCreationTimestamp="2025-10-02 21:39:19 +0000 UTC" firstStartedPulling="2025-10-02 21:39:21.52338274 +0000 UTC m=+952.846390759" lastFinishedPulling="2025-10-02 21:39:55.183516026 +0000 UTC m=+986.506524045" observedRunningTime="2025-10-02 21:39:56.112820201 +0000 UTC m=+987.435828260" watchObservedRunningTime="2025-10-02 21:39:56.116114839 +0000 UTC m=+987.439122878"
Oct 02 21:39:56 crc kubenswrapper[4636]: I1002 21:39:56.133427 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st" podStartSLOduration=2.669719055 podStartE2EDuration="36.133409156s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:21.873024124 +0000 UTC m=+953.196032143" lastFinishedPulling="2025-10-02 21:39:55.336714215 +0000 UTC m=+986.659722244" observedRunningTime="2025-10-02 21:39:56.132244574 +0000 UTC m=+987.455252613" watchObservedRunningTime="2025-10-02 21:39:56.133409156 +0000 UTC m=+987.456417175"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.385967 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-b52pz"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.387358 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-l889h"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.388706 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-r4gzg"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.409959 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-lc2st"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.481067 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-599898f689-7ntw8"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.705481 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-h8kh7"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.817026 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-j7mxj"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.843219 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-zjsn9"
Oct 02 21:40:00 crc kubenswrapper[4636]: I1002 21:40:00.920227 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-spqgz"
Oct 02 21:40:01 crc kubenswrapper[4636]: I1002 21:40:01.014660 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-4gd5n"
Oct 02 21:40:01 crc kubenswrapper[4636]: I1002 21:40:01.049384 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-j7dds"
Oct 02 21:40:01 crc kubenswrapper[4636]: I1002 21:40:01.086983 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-bqhzt"
Oct 02 21:40:01 crc kubenswrapper[4636]: I1002 21:40:01.126443 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-j44cm"
Oct 02 21:40:01 crc kubenswrapper[4636]: I1002 21:40:01.407709 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-pdhqs"
Oct 02 21:40:01 crc kubenswrapper[4636]: I1002 21:40:01.540415 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9"
Oct 02 21:40:06 crc kubenswrapper[4636]: I1002 21:40:06.170363 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" event={"ID":"a7b69bb7-2b74-4724-b459-215b7d515840","Type":"ContainerStarted","Data":"cc9f31a72c28bb7f9451d9c621b6ee3dd6ca7e2eeae372a088ca752e10c068c5"}
Oct 02 21:40:06 crc kubenswrapper[4636]: I1002 21:40:06.172405 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v"
Oct 02 21:40:06 crc kubenswrapper[4636]: I1002 21:40:06.172872 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" event={"ID":"a6a3569c-020b-4cf3-8895-8f31e98bae75","Type":"ContainerStarted","Data":"d7b8deb9c9e0f77385c3d32c4f63f8aff321452f3b116be20bac6f1ce05f3911"}
Oct 02 21:40:06 crc kubenswrapper[4636]: I1002 21:40:06.173048 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb"
Oct 02 21:40:06 crc kubenswrapper[4636]: I1002 21:40:06.185959 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v" podStartSLOduration=2.46892679 podStartE2EDuration="46.185941654s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.318818555 +0000 UTC m=+953.641826574" lastFinishedPulling="2025-10-02 21:40:06.035833419 +0000 UTC m=+997.358841438" observedRunningTime="2025-10-02 21:40:06.18503912 +0000 UTC m=+997.508047139" watchObservedRunningTime="2025-10-02 21:40:06.185941654 +0000 UTC m=+997.508949673"
Oct 02 21:40:06 crc kubenswrapper[4636]: I1002 21:40:06.207937 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb" podStartSLOduration=3.581373786 podStartE2EDuration="46.207915226s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.73958458 +0000 UTC m=+954.062592599" lastFinishedPulling="2025-10-02 21:40:05.36612601 +0000 UTC m=+996.689134039" observedRunningTime="2025-10-02 21:40:06.203153298 +0000 UTC m=+997.526161317" watchObservedRunningTime="2025-10-02 21:40:06.207915226 +0000 UTC m=+997.530923245"
Oct 02 21:40:08 crc kubenswrapper[4636]: I1002 21:40:08.189714 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" event={"ID":"31a7600e-116a-458a-9102-0e4207fcc73c","Type":"ContainerStarted","Data":"ed3455bf58a0dd625188f25520555bbd312f675fc2437fe97fbbfe403f0b317a"}
Oct 02 21:40:08 crc kubenswrapper[4636]: I1002 21:40:08.190363 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5"
Oct 02 21:40:08 crc kubenswrapper[4636]: I1002 21:40:08.192102 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" event={"ID":"994cd5bb-107d-4426-9549-f5805479b1d7","Type":"ContainerStarted","Data":"5b0cb64e723067f04c45ab40df666ef1ed0b6677950f3c1ef89f5e86423339b2"}
Oct 02 21:40:08 crc kubenswrapper[4636]: I1002 21:40:08.192304 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4"
Oct 02 21:40:08 crc kubenswrapper[4636]: I1002 21:40:08.205277 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5" podStartSLOduration=4.3821133549999995 podStartE2EDuration="48.205259894s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.203784809 +0000 UTC m=+954.526792828" lastFinishedPulling="2025-10-02 21:40:07.026931348 +0000 UTC m=+998.349939367" observedRunningTime="2025-10-02 21:40:08.204612356 +0000 UTC m=+999.527620375" watchObservedRunningTime="2025-10-02 21:40:08.205259894 +0000 UTC m=+999.528267913"
Oct 02 21:40:08 crc kubenswrapper[4636]: I1002 21:40:08.220437 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4" podStartSLOduration=4.138421081 podStartE2EDuration="48.220419742s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.944385592 +0000 UTC m=+954.267393611" lastFinishedPulling="2025-10-02 21:40:07.026384253 +0000 UTC m=+998.349392272" observedRunningTime="2025-10-02 21:40:08.218877351 +0000 UTC m=+999.541885380" watchObservedRunningTime="2025-10-02 21:40:08.220419742 +0000 UTC m=+999.543427751"
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.213703 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" event={"ID":"20dffa00-5117-4d3e-8b67-467357444816","Type":"ContainerStarted","Data":"fc24c9f4ed4b65e8d02f5be571f93c20c03065c1ed4b88ddeb09b0129c5d2ddf"}
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.214274 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2"
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.218042 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" event={"ID":"a10a56aa-5ebe-4f98-8f99-fcba515c254d","Type":"ContainerStarted","Data":"a21cc571cc077e832aed515d98b7f13e325f0f323f0c88458b542350cf0e549a"}
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.218301 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn"
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.237411 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2" podStartSLOduration=4.379237877 podStartE2EDuration="50.237384618s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:23.197998753 +0000 UTC m=+954.521006772" lastFinishedPulling="2025-10-02 21:40:09.056145494 +0000 UTC m=+1000.379153513" observedRunningTime="2025-10-02 21:40:10.231222102 +0000 UTC m=+1001.554230121" watchObservedRunningTime="2025-10-02 21:40:10.237384618 +0000 UTC m=+1001.560392647"
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.250116 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn" podStartSLOduration=3.7106591570000003 podStartE2EDuration="50.25007727s" podCreationTimestamp="2025-10-02 21:39:20 +0000 UTC" firstStartedPulling="2025-10-02 21:39:22.516398282 +0000 UTC m=+953.839406301" lastFinishedPulling="2025-10-02 21:40:09.055816385 +0000 UTC m=+1000.378824414" observedRunningTime="2025-10-02 21:40:10.248033365 +0000 UTC m=+1001.571041384" watchObservedRunningTime="2025-10-02 21:40:10.25007727 +0000 UTC m=+1001.573085289"
Oct 02 21:40:10 crc kubenswrapper[4636]: I1002 21:40:10.763298 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-ccnnb"
Oct 02 21:40:20 crc kubenswrapper[4636]: I1002 21:40:20.498829 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-d74cn"
Oct 02 21:40:20 crc kubenswrapper[4636]: I1002 21:40:20.797344 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-dpx7v"
Oct 02 21:40:20 crc kubenswrapper[4636]: I1002 21:40:20.896446 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-z4lx2"
Oct 02 21:40:21 crc kubenswrapper[4636]: I1002 21:40:21.121727 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-7vrb4"
Oct 02 21:40:21 crc kubenswrapper[4636]: I1002 21:40:21.561250 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-g9jx5"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.199410 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9ng8f"]
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.203592 4636 util.go:30] "No 
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.209870 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.209916 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.209917 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-qpcng"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.212821 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.213999 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9ng8f"]
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.279872 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-bwr6w"]
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.280972 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.287201 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.304016 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e63f507-92db-4512-9f13-1e9a9327335a-config\") pod \"dnsmasq-dns-675f4bcbfc-9ng8f\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.304080 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6kff\" (UniqueName: \"kubernetes.io/projected/8e63f507-92db-4512-9f13-1e9a9327335a-kube-api-access-v6kff\") pod \"dnsmasq-dns-675f4bcbfc-9ng8f\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.310460 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-bwr6w"]
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.405175 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e63f507-92db-4512-9f13-1e9a9327335a-config\") pod \"dnsmasq-dns-675f4bcbfc-9ng8f\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.405347 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-config\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.405433 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6kff\" (UniqueName: \"kubernetes.io/projected/8e63f507-92db-4512-9f13-1e9a9327335a-kube-api-access-v6kff\") pod \"dnsmasq-dns-675f4bcbfc-9ng8f\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.405530 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7krcr\" (UniqueName: \"kubernetes.io/projected/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-kube-api-access-7krcr\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.405617 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.406004 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e63f507-92db-4512-9f13-1e9a9327335a-config\") pod \"dnsmasq-dns-675f4bcbfc-9ng8f\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.437802 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6kff\" (UniqueName: \"kubernetes.io/projected/8e63f507-92db-4512-9f13-1e9a9327335a-kube-api-access-v6kff\") pod \"dnsmasq-dns-675f4bcbfc-9ng8f\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.507256 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.507395 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-config\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.507460 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7krcr\" (UniqueName: \"kubernetes.io/projected/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-kube-api-access-7krcr\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.508493 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.508995 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-config\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.525348 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7krcr\" (UniqueName: \"kubernetes.io/projected/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-kube-api-access-7krcr\") pod \"dnsmasq-dns-78dd6ddcc-bwr6w\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.530095 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f"
Oct 02 21:40:41 crc kubenswrapper[4636]: I1002 21:40:41.597088 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w"
Oct 02 21:40:42 crc kubenswrapper[4636]: I1002 21:40:42.002931 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9ng8f"]
Oct 02 21:40:42 crc kubenswrapper[4636]: I1002 21:40:42.012514 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 02 21:40:42 crc kubenswrapper[4636]: I1002 21:40:42.134528 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-bwr6w"]
Oct 02 21:40:42 crc kubenswrapper[4636]: W1002 21:40:42.142069 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c7e74de_2776_4205_9f6a_6709e0f4cfc7.slice/crio-b12ee2459cd6b5d3440ebfb96654995afe6b5a475725ed527ae7c60bab86e796 WatchSource:0}: Error finding container b12ee2459cd6b5d3440ebfb96654995afe6b5a475725ed527ae7c60bab86e796: Status 404 returned error can't find the container with id b12ee2459cd6b5d3440ebfb96654995afe6b5a475725ed527ae7c60bab86e796
Oct 02 21:40:42 crc kubenswrapper[4636]: I1002 21:40:42.475504 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f" event={"ID":"8e63f507-92db-4512-9f13-1e9a9327335a","Type":"ContainerStarted","Data":"ae2b163abb3c4db9c8a527550e8ad44a0b0fbeb3b1b3555e05795f97ceb103fa"}
Oct 02 21:40:42 crc kubenswrapper[4636]: I1002 21:40:42.477046 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w" event={"ID":"1c7e74de-2776-4205-9f6a-6709e0f4cfc7","Type":"ContainerStarted","Data":"b12ee2459cd6b5d3440ebfb96654995afe6b5a475725ed527ae7c60bab86e796"}
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.595610 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9ng8f"]
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.622848 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-gvl6d"]
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.624003 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.644626 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-gvl6d"]
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.772933 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87ffq\" (UniqueName: \"kubernetes.io/projected/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-kube-api-access-87ffq\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.773327 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-config\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.773366 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.874473 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-config\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.874554 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.874596 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87ffq\" (UniqueName: \"kubernetes.io/projected/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-kube-api-access-87ffq\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.875848 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-config\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.875912 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.910923 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87ffq\" (UniqueName: \"kubernetes.io/projected/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-kube-api-access-87ffq\") pod \"dnsmasq-dns-5ccc8479f9-gvl6d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:44 crc kubenswrapper[4636]: I1002 21:40:44.951403 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.043845 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-bwr6w"]
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.088223 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-fbfcm"]
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.089423 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.111689 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-fbfcm"]
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.182140 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.182381 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-config\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.182978 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g7l4\" (UniqueName: \"kubernetes.io/projected/56022ebb-4c18-4c79-a7e5-14081da5735d-kube-api-access-5g7l4\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.284306 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-config\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.284663 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g7l4\" (UniqueName: \"kubernetes.io/projected/56022ebb-4c18-4c79-a7e5-14081da5735d-kube-api-access-5g7l4\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.284723 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.285686 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.286982 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-config\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.310708 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g7l4\" (UniqueName: \"kubernetes.io/projected/56022ebb-4c18-4c79-a7e5-14081da5735d-kube-api-access-5g7l4\") pod \"dnsmasq-dns-57d769cc4f-fbfcm\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.411605 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.649582 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-gvl6d"]
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.822108 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.824213 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.829934 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.830013 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.830149 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.830172 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zl72l"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.830229 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.832179 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.832183 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.853191 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901263 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-568pj\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-kube-api-access-568pj\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901363 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901386 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901445 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901515 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901537 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901671 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901810 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901851 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901892 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.901907 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:45 crc kubenswrapper[4636]: I1002 21:40:45.910538 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-fbfcm"]
Oct 02 21:40:45 crc kubenswrapper[4636]: W1002 21:40:45.922148 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56022ebb_4c18_4c79_a7e5_14081da5735d.slice/crio-8488495a949c18a633f8096c758eefd4f8d76add92f498acba375dc25538bd6a WatchSource:0}: Error finding container 8488495a949c18a633f8096c758eefd4f8d76add92f498acba375dc25538bd6a: Status 404 returned error can't find the container with id 8488495a949c18a633f8096c758eefd4f8d76add92f498acba375dc25538bd6a
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003691 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003734 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003795 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003847 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003868 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003892 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003923 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003941 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003964 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.003998 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.004017 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-568pj\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-kube-api-access-568pj\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.004299 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.004551 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.005975 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.006462 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.007879 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.016197 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.016334 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.017312 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.020393 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.020569 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-568pj\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-kube-api-access-568pj\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.022860 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.031845 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.150997 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.251570 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.259290 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.279841 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.280815 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.281192 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.281274 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-vvlmg" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.283316 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.284325 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.285839 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.326993 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.410847 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.410909 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkmpz\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-kube-api-access-fkmpz\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.410937 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.410961 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.410995 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.411017 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.411031 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-server-conf\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.411052 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/780601d4-af7f-47ee-b580-939d5531e805-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.411077 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.411091 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-config-data\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.411107 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/780601d4-af7f-47ee-b580-939d5531e805-pod-info\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.511849 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.511894 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-config-data\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.511915 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/780601d4-af7f-47ee-b580-939d5531e805-pod-info\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.511979 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc 
kubenswrapper[4636]: I1002 21:40:46.512008 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkmpz\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-kube-api-access-fkmpz\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.512030 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.512052 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.512082 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.512123 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.512138 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-server-conf\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.512173 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/780601d4-af7f-47ee-b580-939d5531e805-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.513691 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.514419 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.514520 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.514858 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-server-conf\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.514297 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-config-data\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.515284 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.522927 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/780601d4-af7f-47ee-b580-939d5531e805-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.526478 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/780601d4-af7f-47ee-b580-939d5531e805-pod-info\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.527962 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" event={"ID":"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d","Type":"ContainerStarted","Data":"f46698d302657cff4660ff48d4ce979850bbdcabb1de5bf639863377f79deef8"} Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.529257 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.532509 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" event={"ID":"56022ebb-4c18-4c79-a7e5-14081da5735d","Type":"ContainerStarted","Data":"8488495a949c18a633f8096c758eefd4f8d76add92f498acba375dc25538bd6a"} Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.539919 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkmpz\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-kube-api-access-fkmpz\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.574896 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.576156 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " pod="openstack/rabbitmq-server-0" Oct 02 21:40:46 crc kubenswrapper[4636]: I1002 21:40:46.638135 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.673524 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.679587 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.690280 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-bv8s5" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.690285 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.690431 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.691563 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.692138 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.694044 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.694338 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740193 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740241 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740285 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740304 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/80a6d533-3442-4d4f-be04-1e95eefb5598-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740330 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740347 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740372 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740496 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.740513 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpx5f\" (UniqueName: \"kubernetes.io/projected/80a6d533-3442-4d4f-be04-1e95eefb5598-kube-api-access-dpx5f\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842302 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842352 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpx5f\" (UniqueName: \"kubernetes.io/projected/80a6d533-3442-4d4f-be04-1e95eefb5598-kube-api-access-dpx5f\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842370 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842416 4636 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842512 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842531 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/80a6d533-3442-4d4f-be04-1e95eefb5598-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842570 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842585 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.844627 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/80a6d533-3442-4d4f-be04-1e95eefb5598-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.842603 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.844796 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.844909 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.845384 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.845855 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/80a6d533-3442-4d4f-be04-1e95eefb5598-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.848308 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.853890 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.859660 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a6d533-3442-4d4f-be04-1e95eefb5598-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.860459 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:47 crc kubenswrapper[4636]: I1002 21:40:47.875249 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpx5f\" (UniqueName: \"kubernetes.io/projected/80a6d533-3442-4d4f-be04-1e95eefb5598-kube-api-access-dpx5f\") pod \"openstack-cell1-galera-0\" (UID: \"80a6d533-3442-4d4f-be04-1e95eefb5598\") " pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.010087 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.376143 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.377325 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.380776 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-rjh6f" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.381803 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.382105 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.382494 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.402126 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.459867 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-config-data-default\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.459912 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f2b95105-58f6-4984-92c7-d3dbc7dfa131-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.459932 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-kolla-config\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.459957 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.459974 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.460268 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gm6w\" (UniqueName: \"kubernetes.io/projected/f2b95105-58f6-4984-92c7-d3dbc7dfa131-kube-api-access-4gm6w\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.460465 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: 
\"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.460527 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.460556 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-secrets\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.562356 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563120 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563340 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563370 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-secrets\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563427 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-config-data-default\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563452 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f2b95105-58f6-4984-92c7-d3dbc7dfa131-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563469 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-kolla-config\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563490 4636 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563505 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.563524 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gm6w\" (UniqueName: \"kubernetes.io/projected/f2b95105-58f6-4984-92c7-d3dbc7dfa131-kube-api-access-4gm6w\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.564023 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f2b95105-58f6-4984-92c7-d3dbc7dfa131-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.564988 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-config-data-default\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.566031 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-kolla-config\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.566265 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b95105-58f6-4984-92c7-d3dbc7dfa131-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.570526 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-secrets\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.570696 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.579217 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b95105-58f6-4984-92c7-d3dbc7dfa131-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 
21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.582140 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gm6w\" (UniqueName: \"kubernetes.io/projected/f2b95105-58f6-4984-92c7-d3dbc7dfa131-kube-api-access-4gm6w\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.605442 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"f2b95105-58f6-4984-92c7-d3dbc7dfa131\") " pod="openstack/openstack-galera-0" Oct 02 21:40:48 crc kubenswrapper[4636]: I1002 21:40:48.706331 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.018154 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.021497 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.022737 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.023977 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-dj6p6" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.024098 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.024158 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.074121 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d85e3d9d-106a-4b20-8496-7394884ca255-kolla-config\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.074228 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85e3d9d-106a-4b20-8496-7394884ca255-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.074366 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl8cm\" (UniqueName: \"kubernetes.io/projected/d85e3d9d-106a-4b20-8496-7394884ca255-kube-api-access-tl8cm\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.074406 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d85e3d9d-106a-4b20-8496-7394884ca255-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.074528 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/configmap/d85e3d9d-106a-4b20-8496-7394884ca255-config-data\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.176204 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85e3d9d-106a-4b20-8496-7394884ca255-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.176285 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl8cm\" (UniqueName: \"kubernetes.io/projected/d85e3d9d-106a-4b20-8496-7394884ca255-kube-api-access-tl8cm\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.176309 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d85e3d9d-106a-4b20-8496-7394884ca255-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.176356 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d85e3d9d-106a-4b20-8496-7394884ca255-config-data\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.176393 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d85e3d9d-106a-4b20-8496-7394884ca255-kolla-config\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.177074 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d85e3d9d-106a-4b20-8496-7394884ca255-kolla-config\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.179218 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d85e3d9d-106a-4b20-8496-7394884ca255-config-data\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.184205 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85e3d9d-106a-4b20-8496-7394884ca255-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.197880 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d85e3d9d-106a-4b20-8496-7394884ca255-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.216014 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl8cm\" (UniqueName: 
\"kubernetes.io/projected/d85e3d9d-106a-4b20-8496-7394884ca255-kube-api-access-tl8cm\") pod \"memcached-0\" (UID: \"d85e3d9d-106a-4b20-8496-7394884ca255\") " pod="openstack/memcached-0" Oct 02 21:40:49 crc kubenswrapper[4636]: I1002 21:40:49.336296 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.104796 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.109671 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.115147 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-b5h96" Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.146779 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.206486 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls8d8\" (UniqueName: \"kubernetes.io/projected/c1312598-8735-44c7-a810-4bb4c57e5fba-kube-api-access-ls8d8\") pod \"kube-state-metrics-0\" (UID: \"c1312598-8735-44c7-a810-4bb4c57e5fba\") " pod="openstack/kube-state-metrics-0" Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.308350 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls8d8\" (UniqueName: \"kubernetes.io/projected/c1312598-8735-44c7-a810-4bb4c57e5fba-kube-api-access-ls8d8\") pod \"kube-state-metrics-0\" (UID: \"c1312598-8735-44c7-a810-4bb4c57e5fba\") " pod="openstack/kube-state-metrics-0" Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.327878 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls8d8\" (UniqueName: \"kubernetes.io/projected/c1312598-8735-44c7-a810-4bb4c57e5fba-kube-api-access-ls8d8\") pod \"kube-state-metrics-0\" (UID: \"c1312598-8735-44c7-a810-4bb4c57e5fba\") " pod="openstack/kube-state-metrics-0" Oct 02 21:40:51 crc kubenswrapper[4636]: I1002 21:40:51.428817 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.423252 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-b5dv2"] Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.424661 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.430078 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.430090 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-txgg2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.430907 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.441216 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b5dv2"] Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.494540 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-ffvpv"] Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.501634 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561804 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkcbv\" (UniqueName: \"kubernetes.io/projected/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-kube-api-access-vkcbv\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561870 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-etc-ovs\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561901 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-combined-ca-bundle\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561919 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-run-ovn\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561950 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-log-ovn\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561968 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-scripts\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.561982 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-ovn-controller-tls-certs\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.562002 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-scripts\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.562099 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-log\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.562132 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-lib\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.562204 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-run\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.562233 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-run\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.562262 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp4c5\" (UniqueName: \"kubernetes.io/projected/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-kube-api-access-zp4c5\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.600407 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ffvpv"] Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.662976 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-run\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663022 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-run\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 
crc kubenswrapper[4636]: I1002 21:40:54.663054 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp4c5\" (UniqueName: \"kubernetes.io/projected/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-kube-api-access-zp4c5\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663072 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkcbv\" (UniqueName: \"kubernetes.io/projected/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-kube-api-access-vkcbv\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663113 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-etc-ovs\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663139 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-combined-ca-bundle\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663154 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-run-ovn\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663179 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-log-ovn\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663196 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-scripts\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663211 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-ovn-controller-tls-certs\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663229 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-scripts\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663251 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-lib\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663265 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-log\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663604 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-run\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.663626 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-run\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.664921 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-etc-ovs\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.665860 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-run-ovn\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.666030 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-var-log-ovn\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.666191 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-lib\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.666516 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-scripts\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.666866 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-var-log\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.667470 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-scripts\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.671623 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-ovn-controller-tls-certs\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.672691 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-combined-ca-bundle\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.698318 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkcbv\" (UniqueName: \"kubernetes.io/projected/189fcf5f-fafd-4af5-9b02-d8d33b6bfe65-kube-api-access-vkcbv\") pod \"ovn-controller-b5dv2\" (UID: \"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65\") " pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.700537 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.701809 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.703376 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp4c5\" (UniqueName: \"kubernetes.io/projected/1ba9bb1f-b0f2-4379-8ac3-7862638e6661-kube-api-access-zp4c5\") pod \"ovn-controller-ovs-ffvpv\" (UID: \"1ba9bb1f-b0f2-4379-8ac3-7862638e6661\") " pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.707026 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.707294 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-k69m6" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.707466 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.713396 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.719031 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.739787 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-b5dv2" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.740180 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.839088 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866740 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866804 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866833 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce7eeaa-df59-4316-835f-9f0c2f233a53-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866854 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce7eeaa-df59-4316-835f-9f0c2f233a53-config\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866920 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866949 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866967 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs622\" (UniqueName: \"kubernetes.io/projected/bce7eeaa-df59-4316-835f-9f0c2f233a53-kube-api-access-fs622\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.866995 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce7eeaa-df59-4316-835f-9f0c2f233a53-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.968876 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.968940 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.968967 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce7eeaa-df59-4316-835f-9f0c2f233a53-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969002 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce7eeaa-df59-4316-835f-9f0c2f233a53-config\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969038 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969083 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969101 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs622\" (UniqueName: \"kubernetes.io/projected/bce7eeaa-df59-4316-835f-9f0c2f233a53-kube-api-access-fs622\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969199 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969587 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce7eeaa-df59-4316-835f-9f0c2f233a53-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.969130 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce7eeaa-df59-4316-835f-9f0c2f233a53-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.970370 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce7eeaa-df59-4316-835f-9f0c2f233a53-config\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " 
pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.970622 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce7eeaa-df59-4316-835f-9f0c2f233a53-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.973140 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.974978 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.975452 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce7eeaa-df59-4316-835f-9f0c2f233a53-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:54 crc kubenswrapper[4636]: I1002 21:40:54.987412 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs622\" (UniqueName: \"kubernetes.io/projected/bce7eeaa-df59-4316-835f-9f0c2f233a53-kube-api-access-fs622\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:55 crc kubenswrapper[4636]: I1002 21:40:55.003162 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce7eeaa-df59-4316-835f-9f0c2f233a53\") " pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:55 crc kubenswrapper[4636]: I1002 21:40:55.086572 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 02 21:40:55 crc kubenswrapper[4636]: I1002 21:40:55.159082 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.049536 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.051774 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.055048 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.055180 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.056317 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.057611 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-mfxct" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.078515 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.123039 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aea58193-4a02-48f4-b4c6-b0938df463ff-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.123337 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.123426 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n224f\" (UniqueName: \"kubernetes.io/projected/aea58193-4a02-48f4-b4c6-b0938df463ff-kube-api-access-n224f\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.123763 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aea58193-4a02-48f4-b4c6-b0938df463ff-config\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.123925 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.124017 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.124058 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aea58193-4a02-48f4-b4c6-b0938df463ff-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " 
pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.124093 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.225452 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aea58193-4a02-48f4-b4c6-b0938df463ff-config\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.225790 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.225898 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.225986 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aea58193-4a02-48f4-b4c6-b0938df463ff-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226061 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226150 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aea58193-4a02-48f4-b4c6-b0938df463ff-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226225 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226309 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n224f\" (UniqueName: \"kubernetes.io/projected/aea58193-4a02-48f4-b4c6-b0938df463ff-kube-api-access-n224f\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226323 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/aea58193-4a02-48f4-b4c6-b0938df463ff-config\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226486 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aea58193-4a02-48f4-b4c6-b0938df463ff-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.226189 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.227363 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aea58193-4a02-48f4-b4c6-b0938df463ff-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.231266 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.241523 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.246222 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n224f\" (UniqueName: \"kubernetes.io/projected/aea58193-4a02-48f4-b4c6-b0938df463ff-kube-api-access-n224f\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.247686 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea58193-4a02-48f4-b4c6-b0938df463ff-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.254654 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"aea58193-4a02-48f4-b4c6-b0938df463ff\") " pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:58 crc kubenswrapper[4636]: I1002 21:40:58.367701 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 02 21:40:59 crc kubenswrapper[4636]: W1002 21:40:59.348631 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a227f5a_7dbd_49f9_b8a0_62c3911fefa9.slice/crio-f80d1359216378e382bab24740336e669283d37ff98c6f40b15f9867e938dadf WatchSource:0}: Error finding container f80d1359216378e382bab24740336e669283d37ff98c6f40b15f9867e938dadf: Status 404 returned error can't find the container with id f80d1359216378e382bab24740336e669283d37ff98c6f40b15f9867e938dadf Oct 02 21:40:59 crc kubenswrapper[4636]: I1002 21:40:59.651741 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9","Type":"ContainerStarted","Data":"f80d1359216378e382bab24740336e669283d37ff98c6f40b15f9867e938dadf"} Oct 02 21:41:00 crc kubenswrapper[4636]: E1002 21:41:00.234921 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 02 21:41:00 crc kubenswrapper[4636]: E1002 21:41:00.235426 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7krcr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-bwr6w_openstack(1c7e74de-2776-4205-9f6a-6709e0f4cfc7): ErrImagePull: rpc error: code = Canceled desc = copying 
config: context canceled" logger="UnhandledError" Oct 02 21:41:00 crc kubenswrapper[4636]: E1002 21:41:00.236874 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w" podUID="1c7e74de-2776-4205-9f6a-6709e0f4cfc7" Oct 02 21:41:00 crc kubenswrapper[4636]: E1002 21:41:00.252323 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 02 21:41:00 crc kubenswrapper[4636]: E1002 21:41:00.252479 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v6kff,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-9ng8f_openstack(8e63f507-92db-4512-9f13-1e9a9327335a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:41:00 crc kubenswrapper[4636]: E1002 21:41:00.254807 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f" podUID="8e63f507-92db-4512-9f13-1e9a9327335a" Oct 02 21:41:00 crc kubenswrapper[4636]: I1002 21:41:00.935118 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 02 21:41:00 crc kubenswrapper[4636]: I1002 21:41:00.940370 4636 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:41:00 crc kubenswrapper[4636]: W1002 21:41:00.955349 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod780601d4_af7f_47ee_b580_939d5531e805.slice/crio-44b07f54af556a5bd7326d5cf80b28674cea14bbf2e3eb26bceeae50cd523de4 WatchSource:0}: Error finding container 44b07f54af556a5bd7326d5cf80b28674cea14bbf2e3eb26bceeae50cd523de4: Status 404 returned error can't find the container with id 44b07f54af556a5bd7326d5cf80b28674cea14bbf2e3eb26bceeae50cd523de4 Oct 02 21:41:00 crc kubenswrapper[4636]: W1002 21:41:00.959895 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80a6d533_3442_4d4f_be04_1e95eefb5598.slice/crio-3b5fa40d68df4a2f5f99976d510e4616ff72aea88813671cc97391ec1c520754 WatchSource:0}: Error finding container 3b5fa40d68df4a2f5f99976d510e4616ff72aea88813671cc97391ec1c520754: Status 404 returned error can't find the container with id 3b5fa40d68df4a2f5f99976d510e4616ff72aea88813671cc97391ec1c520754 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.073028 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 02 21:41:01 crc kubenswrapper[4636]: W1002 21:41:01.076924 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1312598_8735_44c7_a810_4bb4c57e5fba.slice/crio-cd9ddf3076f16007decd30bf0fd25f2bc9af1e8f613b5e000e4c94e131768821 WatchSource:0}: Error finding container cd9ddf3076f16007decd30bf0fd25f2bc9af1e8f613b5e000e4c94e131768821: Status 404 returned error can't find the container with id cd9ddf3076f16007decd30bf0fd25f2bc9af1e8f613b5e000e4c94e131768821 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.079963 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 02 21:41:01 crc kubenswrapper[4636]: W1002 21:41:01.085198 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2b95105_58f6_4984_92c7_d3dbc7dfa131.slice/crio-b7eccc3e91f537813945b58d54dd9de11032cebc6dd8b4db265f3f574a023000 WatchSource:0}: Error finding container b7eccc3e91f537813945b58d54dd9de11032cebc6dd8b4db265f3f574a023000: Status 404 returned error can't find the container with id b7eccc3e91f537813945b58d54dd9de11032cebc6dd8b4db265f3f574a023000 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.251132 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.262168 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289102 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-dns-svc\") pod \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289148 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7krcr\" (UniqueName: \"kubernetes.io/projected/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-kube-api-access-7krcr\") pod \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289237 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e63f507-92db-4512-9f13-1e9a9327335a-config\") pod \"8e63f507-92db-4512-9f13-1e9a9327335a\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289352 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6kff\" (UniqueName: \"kubernetes.io/projected/8e63f507-92db-4512-9f13-1e9a9327335a-kube-api-access-v6kff\") pod \"8e63f507-92db-4512-9f13-1e9a9327335a\" (UID: \"8e63f507-92db-4512-9f13-1e9a9327335a\") " Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289371 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-config\") pod \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\" (UID: \"1c7e74de-2776-4205-9f6a-6709e0f4cfc7\") " Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289682 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e63f507-92db-4512-9f13-1e9a9327335a-config" (OuterVolumeSpecName: "config") pod "8e63f507-92db-4512-9f13-1e9a9327335a" (UID: "8e63f507-92db-4512-9f13-1e9a9327335a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289782 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1c7e74de-2776-4205-9f6a-6709e0f4cfc7" (UID: "1c7e74de-2776-4205-9f6a-6709e0f4cfc7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.289889 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-config" (OuterVolumeSpecName: "config") pod "1c7e74de-2776-4205-9f6a-6709e0f4cfc7" (UID: "1c7e74de-2776-4205-9f6a-6709e0f4cfc7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.290220 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.290232 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.290240 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e63f507-92db-4512-9f13-1e9a9327335a-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.294331 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e63f507-92db-4512-9f13-1e9a9327335a-kube-api-access-v6kff" (OuterVolumeSpecName: "kube-api-access-v6kff") pod "8e63f507-92db-4512-9f13-1e9a9327335a" (UID: "8e63f507-92db-4512-9f13-1e9a9327335a"). InnerVolumeSpecName "kube-api-access-v6kff". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.294490 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-kube-api-access-7krcr" (OuterVolumeSpecName: "kube-api-access-7krcr") pod "1c7e74de-2776-4205-9f6a-6709e0f4cfc7" (UID: "1c7e74de-2776-4205-9f6a-6709e0f4cfc7"). InnerVolumeSpecName "kube-api-access-7krcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.391831 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7krcr\" (UniqueName: \"kubernetes.io/projected/1c7e74de-2776-4205-9f6a-6709e0f4cfc7-kube-api-access-7krcr\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.392021 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6kff\" (UniqueName: \"kubernetes.io/projected/8e63f507-92db-4512-9f13-1e9a9327335a-kube-api-access-v6kff\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.405004 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b5dv2"] Oct 02 21:41:01 crc kubenswrapper[4636]: W1002 21:41:01.426626 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod189fcf5f_fafd_4af5_9b02_d8d33b6bfe65.slice/crio-fafeb0bd37bdcb177a8a4c8b16c3489dd1e912f63d2ce4a1921945f032672bc5 WatchSource:0}: Error finding container fafeb0bd37bdcb177a8a4c8b16c3489dd1e912f63d2ce4a1921945f032672bc5: Status 404 returned error can't find the container with id fafeb0bd37bdcb177a8a4c8b16c3489dd1e912f63d2ce4a1921945f032672bc5 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.431863 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.495785 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 02 21:41:01 crc kubenswrapper[4636]: W1002 21:41:01.501594 4636 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbce7eeaa_df59_4316_835f_9f0c2f233a53.slice/crio-a664dae87e79ca2822bbe88f91a09795d901423960fe3295b9ddd14bb7db76d8 WatchSource:0}: Error finding container a664dae87e79ca2822bbe88f91a09795d901423960fe3295b9ddd14bb7db76d8: Status 404 returned error can't find the container with id a664dae87e79ca2822bbe88f91a09795d901423960fe3295b9ddd14bb7db76d8 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.600344 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ffvpv"] Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.671042 4636 generic.go:334] "Generic (PLEG): container finished" podID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerID="7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc" exitCode=0 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.671126 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" event={"ID":"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d","Type":"ContainerDied","Data":"7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.680521 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"780601d4-af7f-47ee-b580-939d5531e805","Type":"ContainerStarted","Data":"44b07f54af556a5bd7326d5cf80b28674cea14bbf2e3eb26bceeae50cd523de4"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.682248 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2b95105-58f6-4984-92c7-d3dbc7dfa131","Type":"ContainerStarted","Data":"b7eccc3e91f537813945b58d54dd9de11032cebc6dd8b4db265f3f574a023000"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.684350 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.684354 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-9ng8f" event={"ID":"8e63f507-92db-4512-9f13-1e9a9327335a","Type":"ContainerDied","Data":"ae2b163abb3c4db9c8a527550e8ad44a0b0fbeb3b1b3555e05795f97ceb103fa"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.703944 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w" Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.704012 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-bwr6w" event={"ID":"1c7e74de-2776-4205-9f6a-6709e0f4cfc7","Type":"ContainerDied","Data":"b12ee2459cd6b5d3440ebfb96654995afe6b5a475725ed527ae7c60bab86e796"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.706501 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c1312598-8735-44c7-a810-4bb4c57e5fba","Type":"ContainerStarted","Data":"cd9ddf3076f16007decd30bf0fd25f2bc9af1e8f613b5e000e4c94e131768821"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.708125 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"80a6d533-3442-4d4f-be04-1e95eefb5598","Type":"ContainerStarted","Data":"3b5fa40d68df4a2f5f99976d510e4616ff72aea88813671cc97391ec1c520754"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.709518 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ffvpv" event={"ID":"1ba9bb1f-b0f2-4379-8ac3-7862638e6661","Type":"ContainerStarted","Data":"aa30f295686bda1a02af63b2d0c888413bd5fb12109aff60d5acd3a14247957a"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.711222 4636 generic.go:334] "Generic (PLEG): container finished" podID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerID="84b69ba1ef6bdad08cf2ae8d6ec8a542e58e98dbfb773f6fa4f0037a2dc37a45" exitCode=0 Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.711285 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" event={"ID":"56022ebb-4c18-4c79-a7e5-14081da5735d","Type":"ContainerDied","Data":"84b69ba1ef6bdad08cf2ae8d6ec8a542e58e98dbfb773f6fa4f0037a2dc37a45"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.712398 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b5dv2" event={"ID":"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65","Type":"ContainerStarted","Data":"fafeb0bd37bdcb177a8a4c8b16c3489dd1e912f63d2ce4a1921945f032672bc5"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.713352 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d85e3d9d-106a-4b20-8496-7394884ca255","Type":"ContainerStarted","Data":"55d68a21b31d13f88b52dd5b07cc19dc2f646d4fd8b258803e694d3780badb95"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.714646 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce7eeaa-df59-4316-835f-9f0c2f233a53","Type":"ContainerStarted","Data":"a664dae87e79ca2822bbe88f91a09795d901423960fe3295b9ddd14bb7db76d8"} Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.744784 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9ng8f"] Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.751475 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9ng8f"] Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.792616 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-bwr6w"] Oct 02 21:41:01 crc kubenswrapper[4636]: I1002 21:41:01.802012 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-bwr6w"] Oct 02 21:41:02 crc kubenswrapper[4636]: I1002 21:41:02.094817 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ovsdbserver-sb-0"] Oct 02 21:41:03 crc kubenswrapper[4636]: I1002 21:41:03.619265 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c7e74de-2776-4205-9f6a-6709e0f4cfc7" path="/var/lib/kubelet/pods/1c7e74de-2776-4205-9f6a-6709e0f4cfc7/volumes" Oct 02 21:41:03 crc kubenswrapper[4636]: I1002 21:41:03.619930 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e63f507-92db-4512-9f13-1e9a9327335a" path="/var/lib/kubelet/pods/8e63f507-92db-4512-9f13-1e9a9327335a/volumes" Oct 02 21:41:07 crc kubenswrapper[4636]: W1002 21:41:07.381314 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaea58193_4a02_48f4_b4c6_b0938df463ff.slice/crio-ad81ecc54b5b281ef527ee8023390e05938391793ed87d367e243b82d9868a1b WatchSource:0}: Error finding container ad81ecc54b5b281ef527ee8023390e05938391793ed87d367e243b82d9868a1b: Status 404 returned error can't find the container with id ad81ecc54b5b281ef527ee8023390e05938391793ed87d367e243b82d9868a1b Oct 02 21:41:07 crc kubenswrapper[4636]: I1002 21:41:07.764626 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" event={"ID":"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d","Type":"ContainerStarted","Data":"898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c"} Oct 02 21:41:07 crc kubenswrapper[4636]: I1002 21:41:07.764725 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" Oct 02 21:41:07 crc kubenswrapper[4636]: I1002 21:41:07.766821 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aea58193-4a02-48f4-b4c6-b0938df463ff","Type":"ContainerStarted","Data":"ad81ecc54b5b281ef527ee8023390e05938391793ed87d367e243b82d9868a1b"} Oct 02 21:41:07 crc kubenswrapper[4636]: I1002 21:41:07.786822 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" podStartSLOduration=9.009646933 podStartE2EDuration="23.786805476s" podCreationTimestamp="2025-10-02 21:40:44 +0000 UTC" firstStartedPulling="2025-10-02 21:40:45.682064369 +0000 UTC m=+1037.005072388" lastFinishedPulling="2025-10-02 21:41:00.459222912 +0000 UTC m=+1051.782230931" observedRunningTime="2025-10-02 21:41:07.784939876 +0000 UTC m=+1059.107947895" watchObservedRunningTime="2025-10-02 21:41:07.786805476 +0000 UTC m=+1059.109813485" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.565466 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.566015 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dpx5f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(80a6d533-3442-4d4f-be04-1e95eefb5598): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.567190 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="80a6d533-3442-4d4f-be04-1e95eefb5598" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.641046 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.641304 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4gm6w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(f2b95105-58f6-4984-92c7-d3dbc7dfa131): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.642888 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="f2b95105-58f6-4984-92c7-d3dbc7dfa131" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.847641 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="80a6d533-3442-4d4f-be04-1e95eefb5598" Oct 02 21:41:13 crc kubenswrapper[4636]: E1002 21:41:13.850318 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" 
pod="openstack/openstack-galera-0" podUID="f2b95105-58f6-4984-92c7-d3dbc7dfa131" Oct 02 21:41:14 crc kubenswrapper[4636]: I1002 21:41:14.954351 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" Oct 02 21:41:16 crc kubenswrapper[4636]: I1002 21:41:16.879103 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" event={"ID":"56022ebb-4c18-4c79-a7e5-14081da5735d","Type":"ContainerStarted","Data":"92e1800dd7ea5f3915156a0b34862f941ea83016fd9ab0a27a0af33ddd5ada71"} Oct 02 21:41:16 crc kubenswrapper[4636]: I1002 21:41:16.879641 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" Oct 02 21:41:16 crc kubenswrapper[4636]: I1002 21:41:16.902363 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" podStartSLOduration=17.437325201 podStartE2EDuration="31.902341633s" podCreationTimestamp="2025-10-02 21:40:45 +0000 UTC" firstStartedPulling="2025-10-02 21:40:45.929224089 +0000 UTC m=+1037.252232108" lastFinishedPulling="2025-10-02 21:41:00.394240521 +0000 UTC m=+1051.717248540" observedRunningTime="2025-10-02 21:41:16.897075011 +0000 UTC m=+1068.220083030" watchObservedRunningTime="2025-10-02 21:41:16.902341633 +0000 UTC m=+1068.225349652" Oct 02 21:41:18 crc kubenswrapper[4636]: E1002 21:41:18.936736 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Oct 02 21:41:18 crc kubenswrapper[4636]: E1002 21:41:18.937130 4636 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Oct 02 21:41:18 crc kubenswrapper[4636]: E1002 21:41:18.937277 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ls8d8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(c1312598-8735-44c7-a810-4bb4c57e5fba): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 21:41:18 crc kubenswrapper[4636]: E1002 21:41:18.939190 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.901479 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d85e3d9d-106a-4b20-8496-7394884ca255","Type":"ContainerStarted","Data":"978a4eff41fea5c528418b8e08d92c68a56e5c94a91ae0dd347a4084a0375e27"} Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.902072 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.909801 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aea58193-4a02-48f4-b4c6-b0938df463ff","Type":"ContainerStarted","Data":"72607c60ed644974d7c908663953e18d9e39618733042fc077653b2f4426c2da"} Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.912104 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce7eeaa-df59-4316-835f-9f0c2f233a53","Type":"ContainerStarted","Data":"9d1701cd08f422893110240f6ec16add22565b591eb366227c3e1c5b53b42a8b"} Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.914025 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ffvpv" event={"ID":"1ba9bb1f-b0f2-4379-8ac3-7862638e6661","Type":"ContainerStarted","Data":"6040c776c3e5b16eaa69e5e8d9d105c386835924a0770be9892b1e086edc7302"} Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.925583 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b5dv2" event={"ID":"189fcf5f-fafd-4af5-9b02-d8d33b6bfe65","Type":"ContainerStarted","Data":"abb01673add9cd79d2af380e574ae44fa66a7fba33f8afc2c86b1565615d5f14"} Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.925733 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-b5dv2" Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.928382 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9","Type":"ContainerStarted","Data":"0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7"} Oct 02 21:41:19 crc kubenswrapper[4636]: E1002 21:41:19.929184 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" Oct 02 21:41:19 crc kubenswrapper[4636]: I1002 21:41:19.934099 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.33010044 podStartE2EDuration="31.934088046s" podCreationTimestamp="2025-10-02 21:40:48 +0000 UTC" firstStartedPulling="2025-10-02 21:41:01.453122317 +0000 UTC m=+1052.776130336" lastFinishedPulling="2025-10-02 21:41:18.057109923 +0000 UTC m=+1069.380117942" observedRunningTime="2025-10-02 21:41:19.917559531 +0000 UTC m=+1071.240567560" watchObservedRunningTime="2025-10-02 21:41:19.934088046 +0000 UTC m=+1071.257096055" Oct 02 21:41:20 crc kubenswrapper[4636]: I1002 21:41:20.020484 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-b5dv2" podStartSLOduration=8.520615835 podStartE2EDuration="26.020469294s" podCreationTimestamp="2025-10-02 21:40:54 +0000 UTC" firstStartedPulling="2025-10-02 21:41:01.429205913 +0000 UTC m=+1052.752213932" lastFinishedPulling="2025-10-02 21:41:18.929059372 +0000 UTC m=+1070.252067391" observedRunningTime="2025-10-02 21:41:20.016588209 +0000 UTC m=+1071.339596228" watchObservedRunningTime="2025-10-02 21:41:20.020469294 +0000 UTC m=+1071.343477313" Oct 02 21:41:20 crc kubenswrapper[4636]: I1002 21:41:20.936382 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"780601d4-af7f-47ee-b580-939d5531e805","Type":"ContainerStarted","Data":"1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a"} Oct 02 21:41:20 crc kubenswrapper[4636]: I1002 21:41:20.938569 4636 generic.go:334] "Generic (PLEG): container finished" podID="1ba9bb1f-b0f2-4379-8ac3-7862638e6661" containerID="6040c776c3e5b16eaa69e5e8d9d105c386835924a0770be9892b1e086edc7302" exitCode=0 Oct 02 21:41:20 crc kubenswrapper[4636]: I1002 21:41:20.938688 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ffvpv" event={"ID":"1ba9bb1f-b0f2-4379-8ac3-7862638e6661","Type":"ContainerDied","Data":"6040c776c3e5b16eaa69e5e8d9d105c386835924a0770be9892b1e086edc7302"} Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.117531 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.117887 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.963224 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"aea58193-4a02-48f4-b4c6-b0938df463ff","Type":"ContainerStarted","Data":"88a019dc1ed7e6f28423eb4ad3e99a715caa6f5c8f8f4d8b326d283e5b02dd18"} Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.965338 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce7eeaa-df59-4316-835f-9f0c2f233a53","Type":"ContainerStarted","Data":"03e15495b5a0d6899b8a2bdc1e945c27e2a3a1be2ff06cb04c7bcc75529ecf95"} Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.968730 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ffvpv" event={"ID":"1ba9bb1f-b0f2-4379-8ac3-7862638e6661","Type":"ContainerStarted","Data":"481d0bd74bb5c5a5c4cfa7cb9d4e4dca9554e92e1fc247cefae98e0217b9e6b1"} Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.968819 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ffvpv" event={"ID":"1ba9bb1f-b0f2-4379-8ac3-7862638e6661","Type":"ContainerStarted","Data":"619d402d6ab0402aeb99791beef6f47e60c0e93683d851657c0e1e00a1ce6f46"} Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.969304 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.969370 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:41:23 crc kubenswrapper[4636]: I1002 21:41:23.996998 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=11.585225122 podStartE2EDuration="26.996981718s" podCreationTimestamp="2025-10-02 21:40:57 +0000 UTC" firstStartedPulling="2025-10-02 21:41:07.385209463 +0000 UTC m=+1058.708217482" lastFinishedPulling="2025-10-02 21:41:22.796966059 +0000 UTC m=+1074.119974078" observedRunningTime="2025-10-02 21:41:23.992847967 +0000 UTC m=+1075.315856026" watchObservedRunningTime="2025-10-02 21:41:23.996981718 +0000 UTC m=+1075.319989737" Oct 02 21:41:24 crc kubenswrapper[4636]: I1002 21:41:24.065228 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-ffvpv" podStartSLOduration=13.211770529 podStartE2EDuration="30.065206737s" podCreationTimestamp="2025-10-02 21:40:54 +0000 UTC" firstStartedPulling="2025-10-02 21:41:01.626619453 +0000 UTC m=+1052.949627472" lastFinishedPulling="2025-10-02 21:41:18.480055661 +0000 UTC m=+1069.803063680" observedRunningTime="2025-10-02 21:41:24.063982104 +0000 UTC m=+1075.386990123" watchObservedRunningTime="2025-10-02 21:41:24.065206737 +0000 UTC m=+1075.388214756" Oct 02 21:41:24 crc kubenswrapper[4636]: I1002 21:41:24.070137 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.777281932 podStartE2EDuration="31.070120679s" podCreationTimestamp="2025-10-02 21:40:53 +0000 UTC" firstStartedPulling="2025-10-02 21:41:01.504309197 +0000 UTC m=+1052.827317216" lastFinishedPulling="2025-10-02 21:41:22.797147944 +0000 UTC m=+1074.120155963" observedRunningTime="2025-10-02 21:41:24.03305088 +0000 UTC m=+1075.356058899" watchObservedRunningTime="2025-10-02 21:41:24.070120679 +0000 UTC m=+1075.393128698" Oct 02 21:41:24 crc kubenswrapper[4636]: I1002 21:41:24.339015 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.087187 4636 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.087542 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.132807 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.367997 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.404420 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.412982 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.478754 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-gvl6d"] Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.478968 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerName="dnsmasq-dns" containerID="cri-o://898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c" gracePeriod=10 Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.931859 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.983493 4636 generic.go:334] "Generic (PLEG): container finished" podID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerID="898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c" exitCode=0 Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.983847 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" event={"ID":"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d","Type":"ContainerDied","Data":"898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c"} Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.983876 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" event={"ID":"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d","Type":"ContainerDied","Data":"f46698d302657cff4660ff48d4ce979850bbdcabb1de5bf639863377f79deef8"} Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.983899 4636 scope.go:117] "RemoveContainer" containerID="898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.983985 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-gvl6d" Oct 02 21:41:25 crc kubenswrapper[4636]: I1002 21:41:25.984735 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.003209 4636 scope.go:117] "RemoveContainer" containerID="7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.023964 4636 scope.go:117] "RemoveContainer" containerID="898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.023994 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 02 21:41:26 crc kubenswrapper[4636]: E1002 21:41:26.027290 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c\": container with ID starting with 898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c not found: ID does not exist" containerID="898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.027336 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c"} err="failed to get container status \"898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c\": rpc error: code = NotFound desc = could not find container \"898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c\": container with ID starting with 898a64b98c8620e95b8063632fd9a89fdce6ea1bc2bc6ef07254990d7fd9806c not found: ID does not exist" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.027359 4636 scope.go:117] "RemoveContainer" containerID="7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc" Oct 02 21:41:26 crc kubenswrapper[4636]: E1002 21:41:26.030027 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc\": container with ID starting with 7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc not found: ID does not exist" containerID="7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.030057 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc"} err="failed to get container status \"7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc\": rpc error: code = NotFound desc = could not find container \"7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc\": container with ID starting with 7014da456eb2a717bdbd2c77925a0453f8b049ddbed57ce8c589961135c7bedc not found: ID does not exist" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.041280 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.083984 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-config\") pod \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\" (UID: 
\"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.084119 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-dns-svc\") pod \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.084155 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87ffq\" (UniqueName: \"kubernetes.io/projected/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-kube-api-access-87ffq\") pod \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\" (UID: \"6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d\") " Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.159084 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-kube-api-access-87ffq" (OuterVolumeSpecName: "kube-api-access-87ffq") pod "6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" (UID: "6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d"). InnerVolumeSpecName "kube-api-access-87ffq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.160170 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" (UID: "6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.177790 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-config" (OuterVolumeSpecName: "config") pod "6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" (UID: "6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.186130 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.186161 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87ffq\" (UniqueName: \"kubernetes.io/projected/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-kube-api-access-87ffq\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.186173 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.274745 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bhqhb"] Oct 02 21:41:26 crc kubenswrapper[4636]: E1002 21:41:26.275033 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerName="init" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.275045 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerName="init" Oct 02 21:41:26 crc kubenswrapper[4636]: E1002 21:41:26.275076 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerName="dnsmasq-dns" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.275083 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerName="dnsmasq-dns" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.275232 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" containerName="dnsmasq-dns" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.276010 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.278608 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.311208 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-gvl6d"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.320636 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-gvl6d"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.388861 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4bss\" (UniqueName: \"kubernetes.io/projected/ed51854f-592c-4722-b250-ed6dda28e16f-kube-api-access-q4bss\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.388902 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-config\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.389183 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.389255 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.394374 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bhqhb"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.398409 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-n42rv"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.399280 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.402121 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490231 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c99580d-a783-4d8f-9bf7-b8fd883e595e-combined-ca-bundle\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490306 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5c99580d-a783-4d8f-9bf7-b8fd883e595e-ovs-rundir\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490355 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490382 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5c99580d-a783-4d8f-9bf7-b8fd883e595e-ovn-rundir\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490401 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490429 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c99580d-a783-4d8f-9bf7-b8fd883e595e-config\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490448 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5265c\" (UniqueName: \"kubernetes.io/projected/5c99580d-a783-4d8f-9bf7-b8fd883e595e-kube-api-access-5265c\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490478 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c99580d-a783-4d8f-9bf7-b8fd883e595e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490501 4636 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-q4bss\" (UniqueName: \"kubernetes.io/projected/ed51854f-592c-4722-b250-ed6dda28e16f-kube-api-access-q4bss\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490520 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-config\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.490534 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-n42rv"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.491326 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-config\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.491354 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.491873 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.507016 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4bss\" (UniqueName: \"kubernetes.io/projected/ed51854f-592c-4722-b250-ed6dda28e16f-kube-api-access-q4bss\") pod \"dnsmasq-dns-7f896c8c65-bhqhb\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.594452 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.596271 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c99580d-a783-4d8f-9bf7-b8fd883e595e-combined-ca-bundle\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.596344 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5c99580d-a783-4d8f-9bf7-b8fd883e595e-ovs-rundir\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.596984 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5c99580d-a783-4d8f-9bf7-b8fd883e595e-ovn-rundir\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.597234 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5c99580d-a783-4d8f-9bf7-b8fd883e595e-ovn-rundir\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.597238 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c99580d-a783-4d8f-9bf7-b8fd883e595e-config\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.597316 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5265c\" (UniqueName: \"kubernetes.io/projected/5c99580d-a783-4d8f-9bf7-b8fd883e595e-kube-api-access-5265c\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.597381 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c99580d-a783-4d8f-9bf7-b8fd883e595e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.597177 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5c99580d-a783-4d8f-9bf7-b8fd883e595e-ovs-rundir\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.597898 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c99580d-a783-4d8f-9bf7-b8fd883e595e-config\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc 
kubenswrapper[4636]: I1002 21:41:26.606579 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c99580d-a783-4d8f-9bf7-b8fd883e595e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.613052 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c99580d-a783-4d8f-9bf7-b8fd883e595e-combined-ca-bundle\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.659264 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5265c\" (UniqueName: \"kubernetes.io/projected/5c99580d-a783-4d8f-9bf7-b8fd883e595e-kube-api-access-5265c\") pod \"ovn-controller-metrics-n42rv\" (UID: \"5c99580d-a783-4d8f-9bf7-b8fd883e595e\") " pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.712282 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bhqhb"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.717418 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-n42rv" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.782963 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-59qcs"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.784126 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.794090 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.797693 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-59qcs"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.922393 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.934868 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.944908 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg69t\" (UniqueName: \"kubernetes.io/projected/7ff2c88e-b247-45b3-b662-149b15c6d8e7-kube-api-access-kg69t\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.944973 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.945322 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.944984 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.949154 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-config\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.949235 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.949295 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.949478 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-fcrmz" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.949559 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 02 21:41:26 crc kubenswrapper[4636]: I1002 21:41:26.957430 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050629 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050671 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-nb\") pod 
\"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050695 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-config\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050721 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-scripts\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050735 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050757 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050786 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxkx9\" (UniqueName: \"kubernetes.io/projected/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-kube-api-access-lxkx9\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050810 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050856 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050872 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-config\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.050892 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg69t\" (UniqueName: \"kubernetes.io/projected/7ff2c88e-b247-45b3-b662-149b15c6d8e7-kube-api-access-kg69t\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc 
kubenswrapper[4636]: I1002 21:41:27.050912 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.051682 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-config\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.051801 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.052473 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.056212 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.072456 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg69t\" (UniqueName: \"kubernetes.io/projected/7ff2c88e-b247-45b3-b662-149b15c6d8e7-kube-api-access-kg69t\") pod \"dnsmasq-dns-86db49b7ff-59qcs\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.134885 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bhqhb"] Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.153543 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-scripts\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.153747 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.153783 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxkx9\" (UniqueName: \"kubernetes.io/projected/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-kube-api-access-lxkx9\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.153884 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.153921 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-config\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.153959 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.154038 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.155254 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-config\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.155562 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-scripts\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.155992 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.159004 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: W1002 21:41:27.159870 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded51854f_592c_4722_b250_ed6dda28e16f.slice/crio-ccbcc8eaec68dfc2543810726dcfd43a9d55a52b14e4828d00a0c38cecc6c663 WatchSource:0}: Error finding container ccbcc8eaec68dfc2543810726dcfd43a9d55a52b14e4828d00a0c38cecc6c663: Status 404 returned error can't find the container with id ccbcc8eaec68dfc2543810726dcfd43a9d55a52b14e4828d00a0c38cecc6c663 Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.163578 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-ovn-northd-tls-certs\") pod 
\"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.163864 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.168285 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.175571 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxkx9\" (UniqueName: \"kubernetes.io/projected/00dad3a1-1f5c-4a90-a5b2-5437dc73c234-kube-api-access-lxkx9\") pod \"ovn-northd-0\" (UID: \"00dad3a1-1f5c-4a90-a5b2-5437dc73c234\") " pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.285524 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.480611 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-n42rv"] Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.588931 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 02 21:41:27 crc kubenswrapper[4636]: W1002 21:41:27.597340 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00dad3a1_1f5c_4a90_a5b2_5437dc73c234.slice/crio-9a90f6d151298a942d5f545a4b44f1ca49327c7a13255b299915ef31dc90763d WatchSource:0}: Error finding container 9a90f6d151298a942d5f545a4b44f1ca49327c7a13255b299915ef31dc90763d: Status 404 returned error can't find the container with id 9a90f6d151298a942d5f545a4b44f1ca49327c7a13255b299915ef31dc90763d Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.628741 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d" path="/var/lib/kubelet/pods/6a0e8c5e-73de-4f5d-861f-ec3ac224ca7d/volumes" Oct 02 21:41:27 crc kubenswrapper[4636]: I1002 21:41:27.677276 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-59qcs"] Oct 02 21:41:27 crc kubenswrapper[4636]: W1002 21:41:27.686517 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff2c88e_b247_45b3_b662_149b15c6d8e7.slice/crio-f9b09bf715294e76f093e0466f716771020bb838f62a17743bc42f3fb5be51eb WatchSource:0}: Error finding container f9b09bf715294e76f093e0466f716771020bb838f62a17743bc42f3fb5be51eb: Status 404 returned error can't find the container with id f9b09bf715294e76f093e0466f716771020bb838f62a17743bc42f3fb5be51eb Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.004838 4636 generic.go:334] "Generic (PLEG): container finished" podID="ed51854f-592c-4722-b250-ed6dda28e16f" containerID="d041f4d5988e7790722930dd5943f9ce9cb6aec323055f95cbd0a9e84994ea91" exitCode=0 Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.004893 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" event={"ID":"ed51854f-592c-4722-b250-ed6dda28e16f","Type":"ContainerDied","Data":"d041f4d5988e7790722930dd5943f9ce9cb6aec323055f95cbd0a9e84994ea91"} Oct 02 
21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.004918 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" event={"ID":"ed51854f-592c-4722-b250-ed6dda28e16f","Type":"ContainerStarted","Data":"ccbcc8eaec68dfc2543810726dcfd43a9d55a52b14e4828d00a0c38cecc6c663"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.006638 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-n42rv" event={"ID":"5c99580d-a783-4d8f-9bf7-b8fd883e595e","Type":"ContainerStarted","Data":"67a41ecbcc56eafc8d8795161087074f2d6a9756e85e7c5900b347192ed1ce5b"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.006658 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-n42rv" event={"ID":"5c99580d-a783-4d8f-9bf7-b8fd883e595e","Type":"ContainerStarted","Data":"df98eb03a89977758fe674d035faaf4e4b90fb05414a97148a44e64052375b0d"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.009593 4636 generic.go:334] "Generic (PLEG): container finished" podID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerID="666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456" exitCode=0 Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.009732 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" event={"ID":"7ff2c88e-b247-45b3-b662-149b15c6d8e7","Type":"ContainerDied","Data":"666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.009800 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" event={"ID":"7ff2c88e-b247-45b3-b662-149b15c6d8e7","Type":"ContainerStarted","Data":"f9b09bf715294e76f093e0466f716771020bb838f62a17743bc42f3fb5be51eb"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.011528 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"80a6d533-3442-4d4f-be04-1e95eefb5598","Type":"ContainerStarted","Data":"a9612cafa9ea1072dbec23bf41235575621d3fe79af16992a9176d4acf02d1b3"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.012981 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"00dad3a1-1f5c-4a90-a5b2-5437dc73c234","Type":"ContainerStarted","Data":"9a90f6d151298a942d5f545a4b44f1ca49327c7a13255b299915ef31dc90763d"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.017178 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2b95105-58f6-4984-92c7-d3dbc7dfa131","Type":"ContainerStarted","Data":"5df3c5414743a27ca2e3e29a7093e909929b997fe315fb942da9c4dbba695c93"} Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.108975 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-n42rv" podStartSLOduration=2.108722227 podStartE2EDuration="2.108722227s" podCreationTimestamp="2025-10-02 21:41:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:41:28.105566052 +0000 UTC m=+1079.428574071" watchObservedRunningTime="2025-10-02 21:41:28.108722227 +0000 UTC m=+1079.431730246" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.304401 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.383971 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4bss\" (UniqueName: \"kubernetes.io/projected/ed51854f-592c-4722-b250-ed6dda28e16f-kube-api-access-q4bss\") pod \"ed51854f-592c-4722-b250-ed6dda28e16f\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.384059 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-config\") pod \"ed51854f-592c-4722-b250-ed6dda28e16f\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.384132 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-ovsdbserver-sb\") pod \"ed51854f-592c-4722-b250-ed6dda28e16f\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.384179 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-dns-svc\") pod \"ed51854f-592c-4722-b250-ed6dda28e16f\" (UID: \"ed51854f-592c-4722-b250-ed6dda28e16f\") " Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.398955 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed51854f-592c-4722-b250-ed6dda28e16f-kube-api-access-q4bss" (OuterVolumeSpecName: "kube-api-access-q4bss") pod "ed51854f-592c-4722-b250-ed6dda28e16f" (UID: "ed51854f-592c-4722-b250-ed6dda28e16f"). InnerVolumeSpecName "kube-api-access-q4bss". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.404327 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed51854f-592c-4722-b250-ed6dda28e16f" (UID: "ed51854f-592c-4722-b250-ed6dda28e16f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.405233 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-config" (OuterVolumeSpecName: "config") pod "ed51854f-592c-4722-b250-ed6dda28e16f" (UID: "ed51854f-592c-4722-b250-ed6dda28e16f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.406432 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ed51854f-592c-4722-b250-ed6dda28e16f" (UID: "ed51854f-592c-4722-b250-ed6dda28e16f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.485986 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.486023 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4bss\" (UniqueName: \"kubernetes.io/projected/ed51854f-592c-4722-b250-ed6dda28e16f-kube-api-access-q4bss\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.486035 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:28 crc kubenswrapper[4636]: I1002 21:41:28.486044 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed51854f-592c-4722-b250-ed6dda28e16f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.027922 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" event={"ID":"ed51854f-592c-4722-b250-ed6dda28e16f","Type":"ContainerDied","Data":"ccbcc8eaec68dfc2543810726dcfd43a9d55a52b14e4828d00a0c38cecc6c663"} Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.028242 4636 scope.go:117] "RemoveContainer" containerID="d041f4d5988e7790722930dd5943f9ce9cb6aec323055f95cbd0a9e84994ea91" Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.028341 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-bhqhb" Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.042398 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" event={"ID":"7ff2c88e-b247-45b3-b662-149b15c6d8e7","Type":"ContainerStarted","Data":"eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e"} Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.042429 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.095111 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bhqhb"] Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.107963 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-bhqhb"] Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.134425 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" podStartSLOduration=3.134405418 podStartE2EDuration="3.134405418s" podCreationTimestamp="2025-10-02 21:41:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:41:29.127106852 +0000 UTC m=+1080.450114891" watchObservedRunningTime="2025-10-02 21:41:29.134405418 +0000 UTC m=+1080.457413447" Oct 02 21:41:29 crc kubenswrapper[4636]: I1002 21:41:29.614437 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed51854f-592c-4722-b250-ed6dda28e16f" path="/var/lib/kubelet/pods/ed51854f-592c-4722-b250-ed6dda28e16f/volumes" Oct 02 21:41:30 crc kubenswrapper[4636]: I1002 21:41:30.049348 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-northd-0" event={"ID":"00dad3a1-1f5c-4a90-a5b2-5437dc73c234","Type":"ContainerStarted","Data":"a2c478290e39c271682b4c0a59118a106e83d32705ca628b20ec2ab2adba6262"} Oct 02 21:41:30 crc kubenswrapper[4636]: I1002 21:41:30.049634 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 02 21:41:30 crc kubenswrapper[4636]: I1002 21:41:30.049645 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"00dad3a1-1f5c-4a90-a5b2-5437dc73c234","Type":"ContainerStarted","Data":"8cbc1c693673fabbb28f386d774e37e7a69c668d333648c631e79ebe621fa22c"} Oct 02 21:41:30 crc kubenswrapper[4636]: I1002 21:41:30.085266 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.90294152 podStartE2EDuration="4.085245173s" podCreationTimestamp="2025-10-02 21:41:26 +0000 UTC" firstStartedPulling="2025-10-02 21:41:27.623734916 +0000 UTC m=+1078.946742935" lastFinishedPulling="2025-10-02 21:41:28.806038569 +0000 UTC m=+1080.129046588" observedRunningTime="2025-10-02 21:41:30.077675569 +0000 UTC m=+1081.400683588" watchObservedRunningTime="2025-10-02 21:41:30.085245173 +0000 UTC m=+1081.408253202" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.483112 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-59qcs"] Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.483540 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerName="dnsmasq-dns" containerID="cri-o://eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e" gracePeriod=10 Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.527439 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-ndhlh"] Oct 02 21:41:31 crc kubenswrapper[4636]: E1002 21:41:31.529811 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed51854f-592c-4722-b250-ed6dda28e16f" containerName="init" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.529831 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed51854f-592c-4722-b250-ed6dda28e16f" containerName="init" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.529978 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed51854f-592c-4722-b250-ed6dda28e16f" containerName="init" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.533342 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.547416 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-ndhlh"] Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.632179 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-dns-svc\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.632230 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.632309 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgjsn\" (UniqueName: \"kubernetes.io/projected/6eecc450-86a9-4d48-957d-5c8972c14bc6-kube-api-access-kgjsn\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.632329 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.632388 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-config\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.733681 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-dns-svc\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.733722 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.733778 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgjsn\" (UniqueName: \"kubernetes.io/projected/6eecc450-86a9-4d48-957d-5c8972c14bc6-kube-api-access-kgjsn\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.733806 4636 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.733856 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-config\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.735109 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-dns-svc\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.735713 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.735806 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.735820 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-config\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.771059 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgjsn\" (UniqueName: \"kubernetes.io/projected/6eecc450-86a9-4d48-957d-5c8972c14bc6-kube-api-access-kgjsn\") pod \"dnsmasq-dns-698758b865-ndhlh\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") " pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.876649 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:31 crc kubenswrapper[4636]: I1002 21:41:31.978123 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.038402 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg69t\" (UniqueName: \"kubernetes.io/projected/7ff2c88e-b247-45b3-b662-149b15c6d8e7-kube-api-access-kg69t\") pod \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.038500 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-nb\") pod \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.038598 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-config\") pod \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.038645 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-sb\") pod \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.038660 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-dns-svc\") pod \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\" (UID: \"7ff2c88e-b247-45b3-b662-149b15c6d8e7\") " Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.068958 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff2c88e-b247-45b3-b662-149b15c6d8e7-kube-api-access-kg69t" (OuterVolumeSpecName: "kube-api-access-kg69t") pod "7ff2c88e-b247-45b3-b662-149b15c6d8e7" (UID: "7ff2c88e-b247-45b3-b662-149b15c6d8e7"). InnerVolumeSpecName "kube-api-access-kg69t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.083974 4636 generic.go:334] "Generic (PLEG): container finished" podID="f2b95105-58f6-4984-92c7-d3dbc7dfa131" containerID="5df3c5414743a27ca2e3e29a7093e909929b997fe315fb942da9c4dbba695c93" exitCode=0 Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.084081 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2b95105-58f6-4984-92c7-d3dbc7dfa131","Type":"ContainerDied","Data":"5df3c5414743a27ca2e3e29a7093e909929b997fe315fb942da9c4dbba695c93"} Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.086467 4636 generic.go:334] "Generic (PLEG): container finished" podID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerID="eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e" exitCode=0 Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.086522 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" event={"ID":"7ff2c88e-b247-45b3-b662-149b15c6d8e7","Type":"ContainerDied","Data":"eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e"} Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.086548 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" event={"ID":"7ff2c88e-b247-45b3-b662-149b15c6d8e7","Type":"ContainerDied","Data":"f9b09bf715294e76f093e0466f716771020bb838f62a17743bc42f3fb5be51eb"} Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.086564 4636 scope.go:117] "RemoveContainer" containerID="eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.086675 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-59qcs" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.101917 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c1312598-8735-44c7-a810-4bb4c57e5fba","Type":"ContainerStarted","Data":"ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3"} Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.104719 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.122115 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7ff2c88e-b247-45b3-b662-149b15c6d8e7" (UID: "7ff2c88e-b247-45b3-b662-149b15c6d8e7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.136044 4636 generic.go:334] "Generic (PLEG): container finished" podID="80a6d533-3442-4d4f-be04-1e95eefb5598" containerID="a9612cafa9ea1072dbec23bf41235575621d3fe79af16992a9176d4acf02d1b3" exitCode=0 Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.136088 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"80a6d533-3442-4d4f-be04-1e95eefb5598","Type":"ContainerDied","Data":"a9612cafa9ea1072dbec23bf41235575621d3fe79af16992a9176d4acf02d1b3"} Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.144201 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg69t\" (UniqueName: \"kubernetes.io/projected/7ff2c88e-b247-45b3-b662-149b15c6d8e7-kube-api-access-kg69t\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.144225 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.145545 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=11.223578031 podStartE2EDuration="41.145527876s" podCreationTimestamp="2025-10-02 21:40:51 +0000 UTC" firstStartedPulling="2025-10-02 21:41:01.078982374 +0000 UTC m=+1052.401990393" lastFinishedPulling="2025-10-02 21:41:31.000932199 +0000 UTC m=+1082.323940238" observedRunningTime="2025-10-02 21:41:32.139466702 +0000 UTC m=+1083.462474721" watchObservedRunningTime="2025-10-02 21:41:32.145527876 +0000 UTC m=+1083.468535915" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.176188 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ff2c88e-b247-45b3-b662-149b15c6d8e7" (UID: "7ff2c88e-b247-45b3-b662-149b15c6d8e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.177622 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ff2c88e-b247-45b3-b662-149b15c6d8e7" (UID: "7ff2c88e-b247-45b3-b662-149b15c6d8e7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.196352 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-config" (OuterVolumeSpecName: "config") pod "7ff2c88e-b247-45b3-b662-149b15c6d8e7" (UID: "7ff2c88e-b247-45b3-b662-149b15c6d8e7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.242129 4636 scope.go:117] "RemoveContainer" containerID="666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.245267 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.245287 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.245296 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ff2c88e-b247-45b3-b662-149b15c6d8e7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.264814 4636 scope.go:117] "RemoveContainer" containerID="eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e" Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.265417 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e\": container with ID starting with eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e not found: ID does not exist" containerID="eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.265458 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e"} err="failed to get container status \"eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e\": rpc error: code = NotFound desc = could not find container \"eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e\": container with ID starting with eea7c882bfdeb21cbc68ca9d66cce819e5b2b97325c29824f7d35a13c38d154e not found: ID does not exist" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.265485 4636 scope.go:117] "RemoveContainer" containerID="666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456" Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.265833 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456\": container with ID starting with 666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456 not found: ID does not exist" containerID="666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.265887 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456"} err="failed to get container status \"666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456\": rpc error: code = NotFound desc = could not find container \"666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456\": container with ID starting with 666f5859be70724e41bca62bf460b995ada471707f7cf5017f95a2b12d107456 not found: ID does not exist" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.334636 4636 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-ndhlh"] Oct 02 21:41:32 crc kubenswrapper[4636]: W1002 21:41:32.342081 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6eecc450_86a9_4d48_957d_5c8972c14bc6.slice/crio-8d8aa8c7ed70ee7ed9b624dc05a57d8164d8566a5beb2b36d83216e2f41d8b0b WatchSource:0}: Error finding container 8d8aa8c7ed70ee7ed9b624dc05a57d8164d8566a5beb2b36d83216e2f41d8b0b: Status 404 returned error can't find the container with id 8d8aa8c7ed70ee7ed9b624dc05a57d8164d8566a5beb2b36d83216e2f41d8b0b Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.438765 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-59qcs"] Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.445865 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-59qcs"] Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.675937 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.677225 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerName="dnsmasq-dns" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.677325 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerName="dnsmasq-dns" Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.677392 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerName="init" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.677443 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerName="init" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.677658 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" containerName="dnsmasq-dns" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.682002 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.684411 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.684635 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.684491 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.689564 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-2rxgz" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.710404 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.851309 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.851395 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wclz5\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-kube-api-access-wclz5\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.851428 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-cache\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.851458 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-lock\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.851489 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953052 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-cache\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953119 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-lock\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953158 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953200 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953261 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wclz5\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-kube-api-access-wclz5\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953546 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-cache\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953821 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.953890 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-lock\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.953977 4636 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.953994 4636 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 02 21:41:32 crc kubenswrapper[4636]: E1002 21:41:32.954026 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift podName:e9e8a5c2-1a89-4fb3-93d5-877930afc11d nodeName:}" failed. No retries permitted until 2025-10-02 21:41:33.454013304 +0000 UTC m=+1084.777021323 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift") pod "swift-storage-0" (UID: "e9e8a5c2-1a89-4fb3-93d5-877930afc11d") : configmap "swift-ring-files" not found Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.971510 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wclz5\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-kube-api-access-wclz5\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:32 crc kubenswrapper[4636]: I1002 21:41:32.972481 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.147006 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"80a6d533-3442-4d4f-be04-1e95eefb5598","Type":"ContainerStarted","Data":"c45c96021bb1351a1991c27c9da34007c8f5e6ba93a4f0daa940e4a61f0278a0"} Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.149267 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2b95105-58f6-4984-92c7-d3dbc7dfa131","Type":"ContainerStarted","Data":"a2a810e416de2be2a7c9450567462956bd62c559904f8e4060678cd4f600a97a"} Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.150902 4636 generic.go:334] "Generic (PLEG): container finished" podID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerID="da55a27041a2a076b4960065059118856e8be8b3c31c39d1dd538dfdac8a3a7b" exitCode=0 Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.150945 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-ndhlh" event={"ID":"6eecc450-86a9-4d48-957d-5c8972c14bc6","Type":"ContainerDied","Data":"da55a27041a2a076b4960065059118856e8be8b3c31c39d1dd538dfdac8a3a7b"} Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.150972 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-ndhlh" event={"ID":"6eecc450-86a9-4d48-957d-5c8972c14bc6","Type":"ContainerStarted","Data":"8d8aa8c7ed70ee7ed9b624dc05a57d8164d8566a5beb2b36d83216e2f41d8b0b"} Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.171955 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371989.682842 podStartE2EDuration="47.171932837s" podCreationTimestamp="2025-10-02 21:40:46 +0000 UTC" firstStartedPulling="2025-10-02 21:41:00.96600867 +0000 UTC m=+1052.289016689" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:41:33.170310943 +0000 UTC m=+1084.493318972" watchObservedRunningTime="2025-10-02 21:41:33.171932837 +0000 UTC m=+1084.494940856" Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.206473 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=20.170940558 podStartE2EDuration="46.206459367s" podCreationTimestamp="2025-10-02 21:40:47 +0000 UTC" firstStartedPulling="2025-10-02 21:41:01.088218313 +0000 UTC m=+1052.411226332" lastFinishedPulling="2025-10-02 21:41:27.123737122 +0000 UTC m=+1078.446745141" observedRunningTime="2025-10-02 21:41:33.20360594 
+0000 UTC m=+1084.526613959" watchObservedRunningTime="2025-10-02 21:41:33.206459367 +0000 UTC m=+1084.529467386" Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.461799 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:33 crc kubenswrapper[4636]: E1002 21:41:33.462037 4636 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 02 21:41:33 crc kubenswrapper[4636]: E1002 21:41:33.462264 4636 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 02 21:41:33 crc kubenswrapper[4636]: E1002 21:41:33.462364 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift podName:e9e8a5c2-1a89-4fb3-93d5-877930afc11d nodeName:}" failed. No retries permitted until 2025-10-02 21:41:34.462347673 +0000 UTC m=+1085.785355692 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift") pod "swift-storage-0" (UID: "e9e8a5c2-1a89-4fb3-93d5-877930afc11d") : configmap "swift-ring-files" not found Oct 02 21:41:33 crc kubenswrapper[4636]: I1002 21:41:33.616511 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ff2c88e-b247-45b3-b662-149b15c6d8e7" path="/var/lib/kubelet/pods/7ff2c88e-b247-45b3-b662-149b15c6d8e7/volumes" Oct 02 21:41:34 crc kubenswrapper[4636]: I1002 21:41:34.163311 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-ndhlh" event={"ID":"6eecc450-86a9-4d48-957d-5c8972c14bc6","Type":"ContainerStarted","Data":"49b2eb00b7529cf6b18cc90875079588f9396d4010e7058cdf2de8fcdaa31f1a"} Oct 02 21:41:34 crc kubenswrapper[4636]: I1002 21:41:34.163956 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:34 crc kubenswrapper[4636]: I1002 21:41:34.192601 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-ndhlh" podStartSLOduration=3.192583512 podStartE2EDuration="3.192583512s" podCreationTimestamp="2025-10-02 21:41:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:41:34.186704513 +0000 UTC m=+1085.509712532" watchObservedRunningTime="2025-10-02 21:41:34.192583512 +0000 UTC m=+1085.515591531" Oct 02 21:41:34 crc kubenswrapper[4636]: I1002 21:41:34.486302 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:34 crc kubenswrapper[4636]: E1002 21:41:34.486474 4636 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 02 21:41:34 crc kubenswrapper[4636]: E1002 21:41:34.486490 4636 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 02 21:41:34 crc 
kubenswrapper[4636]: E1002 21:41:34.486539 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift podName:e9e8a5c2-1a89-4fb3-93d5-877930afc11d nodeName:}" failed. No retries permitted until 2025-10-02 21:41:36.486522283 +0000 UTC m=+1087.809530312 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift") pod "swift-storage-0" (UID: "e9e8a5c2-1a89-4fb3-93d5-877930afc11d") : configmap "swift-ring-files" not found Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.524600 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:36 crc kubenswrapper[4636]: E1002 21:41:36.524890 4636 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 02 21:41:36 crc kubenswrapper[4636]: E1002 21:41:36.525196 4636 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 02 21:41:36 crc kubenswrapper[4636]: E1002 21:41:36.525297 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift podName:e9e8a5c2-1a89-4fb3-93d5-877930afc11d nodeName:}" failed. No retries permitted until 2025-10-02 21:41:40.525262776 +0000 UTC m=+1091.848270835 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift") pod "swift-storage-0" (UID: "e9e8a5c2-1a89-4fb3-93d5-877930afc11d") : configmap "swift-ring-files" not found Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.572350 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-5575q"] Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.573853 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.576706 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.576981 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.577377 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.593625 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-5575q"] Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.611880 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-5575q"] Oct 02 21:41:36 crc kubenswrapper[4636]: E1002 21:41:36.612437 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-d52p2 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-d52p2 ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-5575q" podUID="a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.648603 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-hnbwh"] Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.649719 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.664779 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hnbwh"] Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.731891 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-scripts\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.731936 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-ring-data-devices\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.731974 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-combined-ca-bundle\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732011 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-scripts\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 
21:41:36.732046 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhz69\" (UniqueName: \"kubernetes.io/projected/07626226-e803-4c29-a34d-9acea829a26b-kube-api-access-vhz69\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732062 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-combined-ca-bundle\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732096 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-dispersionconf\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732118 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-etc-swift\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732153 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-swiftconf\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732172 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-ring-data-devices\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732188 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-dispersionconf\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732211 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d52p2\" (UniqueName: \"kubernetes.io/projected/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-kube-api-access-d52p2\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732239 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07626226-e803-4c29-a34d-9acea829a26b-etc-swift\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " 
pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.732277 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-swiftconf\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: E1002 21:41:36.773881 4636 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.47:33032->38.102.83.47:36985: write tcp 38.102.83.47:33032->38.102.83.47:36985: write: broken pipe Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833700 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhz69\" (UniqueName: \"kubernetes.io/projected/07626226-e803-4c29-a34d-9acea829a26b-kube-api-access-vhz69\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833742 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-combined-ca-bundle\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833810 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-dispersionconf\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833840 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-etc-swift\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833880 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-swiftconf\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833899 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-ring-data-devices\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833914 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-dispersionconf\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833935 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d52p2\" 
(UniqueName: \"kubernetes.io/projected/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-kube-api-access-d52p2\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833953 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-swiftconf\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.833970 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07626226-e803-4c29-a34d-9acea829a26b-etc-swift\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.834005 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-scripts\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.834021 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-ring-data-devices\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.834047 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-combined-ca-bundle\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.834082 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-scripts\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.834836 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07626226-e803-4c29-a34d-9acea829a26b-etc-swift\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.835037 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-scripts\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.835067 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-ring-data-devices\") pod \"swift-ring-rebalance-5575q\" (UID: 
\"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.835331 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-etc-swift\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.835770 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-ring-data-devices\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.836098 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-scripts\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.844990 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-swiftconf\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.845470 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-dispersionconf\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.845481 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-combined-ca-bundle\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.847390 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-swiftconf\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.851620 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-combined-ca-bundle\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.855247 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d52p2\" (UniqueName: \"kubernetes.io/projected/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-kube-api-access-d52p2\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.857681 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vhz69\" (UniqueName: \"kubernetes.io/projected/07626226-e803-4c29-a34d-9acea829a26b-kube-api-access-vhz69\") pod \"swift-ring-rebalance-hnbwh\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.866706 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-dispersionconf\") pod \"swift-ring-rebalance-5575q\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:36 crc kubenswrapper[4636]: I1002 21:41:36.969037 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.185174 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.198349 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341385 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-ring-data-devices\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341431 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-combined-ca-bundle\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341569 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-etc-swift\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341588 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-dispersionconf\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341629 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-scripts\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341647 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d52p2\" (UniqueName: \"kubernetes.io/projected/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-kube-api-access-d52p2\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.341681 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: 
\"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-swiftconf\") pod \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\" (UID: \"a1bf07a9-aa9d-48cb-90cd-4244947ef6a0\") " Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.342285 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-scripts" (OuterVolumeSpecName: "scripts") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.342323 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.342417 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.346106 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.347067 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.347362 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-kube-api-access-d52p2" (OuterVolumeSpecName: "kube-api-access-d52p2") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "kube-api-access-d52p2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.350931 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" (UID: "a1bf07a9-aa9d-48cb-90cd-4244947ef6a0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443374 4636 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443646 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443657 4636 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443665 4636 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443673 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443681 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d52p2\" (UniqueName: \"kubernetes.io/projected/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-kube-api-access-d52p2\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.443692 4636 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:37 crc kubenswrapper[4636]: I1002 21:41:37.448740 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hnbwh"] Oct 02 21:41:37 crc kubenswrapper[4636]: W1002 21:41:37.452192 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07626226_e803_4c29_a34d_9acea829a26b.slice/crio-a6871884aa9cd582db4af6fe430e43181dc3824a79da2ce8f40ec223fc7be3a8 WatchSource:0}: Error finding container a6871884aa9cd582db4af6fe430e43181dc3824a79da2ce8f40ec223fc7be3a8: Status 404 returned error can't find the container with id a6871884aa9cd582db4af6fe430e43181dc3824a79da2ce8f40ec223fc7be3a8 Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.010997 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.011857 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.193027 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-5575q" Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.193038 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hnbwh" event={"ID":"07626226-e803-4c29-a34d-9acea829a26b","Type":"ContainerStarted","Data":"a6871884aa9cd582db4af6fe430e43181dc3824a79da2ce8f40ec223fc7be3a8"} Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.248224 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-5575q"] Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.257615 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-5575q"] Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.706527 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.710248 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 02 21:41:38 crc kubenswrapper[4636]: I1002 21:41:38.766256 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 02 21:41:39 crc kubenswrapper[4636]: I1002 21:41:39.245304 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 02 21:41:39 crc kubenswrapper[4636]: I1002 21:41:39.614267 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1bf07a9-aa9d-48cb-90cd-4244947ef6a0" path="/var/lib/kubelet/pods/a1bf07a9-aa9d-48cb-90cd-4244947ef6a0/volumes" Oct 02 21:41:40 crc kubenswrapper[4636]: I1002 21:41:40.120825 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 02 21:41:40 crc kubenswrapper[4636]: I1002 21:41:40.190297 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 02 21:41:40 crc kubenswrapper[4636]: I1002 21:41:40.628674 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:40 crc kubenswrapper[4636]: E1002 21:41:40.628958 4636 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 02 21:41:40 crc kubenswrapper[4636]: E1002 21:41:40.628971 4636 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 02 21:41:40 crc kubenswrapper[4636]: E1002 21:41:40.629014 4636 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift podName:e9e8a5c2-1a89-4fb3-93d5-877930afc11d nodeName:}" failed. No retries permitted until 2025-10-02 21:41:48.62899837 +0000 UTC m=+1099.952006389 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift") pod "swift-storage-0" (UID: "e9e8a5c2-1a89-4fb3-93d5-877930afc11d") : configmap "swift-ring-files" not found Oct 02 21:41:41 crc kubenswrapper[4636]: I1002 21:41:41.437252 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 02 21:41:41 crc kubenswrapper[4636]: I1002 21:41:41.878974 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-ndhlh" Oct 02 21:41:41 crc kubenswrapper[4636]: I1002 21:41:41.937919 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-fbfcm"] Oct 02 21:41:41 crc kubenswrapper[4636]: I1002 21:41:41.940330 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerName="dnsmasq-dns" containerID="cri-o://92e1800dd7ea5f3915156a0b34862f941ea83016fd9ab0a27a0af33ddd5ada71" gracePeriod=10 Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.235104 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hnbwh" event={"ID":"07626226-e803-4c29-a34d-9acea829a26b","Type":"ContainerStarted","Data":"a766c61a9e954dbf7b9583bf73f07007d9de65b1d272ebf7f9455db92f5d48ac"} Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.238136 4636 generic.go:334] "Generic (PLEG): container finished" podID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerID="92e1800dd7ea5f3915156a0b34862f941ea83016fd9ab0a27a0af33ddd5ada71" exitCode=0 Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.238193 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" event={"ID":"56022ebb-4c18-4c79-a7e5-14081da5735d","Type":"ContainerDied","Data":"92e1800dd7ea5f3915156a0b34862f941ea83016fd9ab0a27a0af33ddd5ada71"} Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.341539 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.360074 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-hnbwh" podStartSLOduration=2.763979457 podStartE2EDuration="6.36006014s" podCreationTimestamp="2025-10-02 21:41:36 +0000 UTC" firstStartedPulling="2025-10-02 21:41:37.45492212 +0000 UTC m=+1088.777930139" lastFinishedPulling="2025-10-02 21:41:41.051002803 +0000 UTC m=+1092.374010822" observedRunningTime="2025-10-02 21:41:42.262627894 +0000 UTC m=+1093.585635913" watchObservedRunningTime="2025-10-02 21:41:42.36006014 +0000 UTC m=+1093.683068159" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.413793 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.466367 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-dns-svc\") pod \"56022ebb-4c18-4c79-a7e5-14081da5735d\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.466474 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-config\") pod \"56022ebb-4c18-4c79-a7e5-14081da5735d\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.466600 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g7l4\" (UniqueName: \"kubernetes.io/projected/56022ebb-4c18-4c79-a7e5-14081da5735d-kube-api-access-5g7l4\") pod \"56022ebb-4c18-4c79-a7e5-14081da5735d\" (UID: \"56022ebb-4c18-4c79-a7e5-14081da5735d\") " Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.483998 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56022ebb-4c18-4c79-a7e5-14081da5735d-kube-api-access-5g7l4" (OuterVolumeSpecName: "kube-api-access-5g7l4") pod "56022ebb-4c18-4c79-a7e5-14081da5735d" (UID: "56022ebb-4c18-4c79-a7e5-14081da5735d"). InnerVolumeSpecName "kube-api-access-5g7l4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.501698 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-config" (OuterVolumeSpecName: "config") pod "56022ebb-4c18-4c79-a7e5-14081da5735d" (UID: "56022ebb-4c18-4c79-a7e5-14081da5735d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.523078 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56022ebb-4c18-4c79-a7e5-14081da5735d" (UID: "56022ebb-4c18-4c79-a7e5-14081da5735d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.569970 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g7l4\" (UniqueName: \"kubernetes.io/projected/56022ebb-4c18-4c79-a7e5-14081da5735d-kube-api-access-5g7l4\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.570069 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:42 crc kubenswrapper[4636]: I1002 21:41:42.570089 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56022ebb-4c18-4c79-a7e5-14081da5735d-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.248478 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.251497 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-fbfcm" event={"ID":"56022ebb-4c18-4c79-a7e5-14081da5735d","Type":"ContainerDied","Data":"8488495a949c18a633f8096c758eefd4f8d76add92f498acba375dc25538bd6a"} Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.251596 4636 scope.go:117] "RemoveContainer" containerID="92e1800dd7ea5f3915156a0b34862f941ea83016fd9ab0a27a0af33ddd5ada71" Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.282966 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-fbfcm"] Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.290041 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-fbfcm"] Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.299702 4636 scope.go:117] "RemoveContainer" containerID="84b69ba1ef6bdad08cf2ae8d6ec8a542e58e98dbfb773f6fa4f0037a2dc37a45" Oct 02 21:41:43 crc kubenswrapper[4636]: I1002 21:41:43.616168 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" path="/var/lib/kubelet/pods/56022ebb-4c18-4c79-a7e5-14081da5735d/volumes" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.616641 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-p47cm"] Oct 02 21:41:44 crc kubenswrapper[4636]: E1002 21:41:44.617282 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerName="init" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.617295 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerName="init" Oct 02 21:41:44 crc kubenswrapper[4636]: E1002 21:41:44.617310 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerName="dnsmasq-dns" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.617316 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerName="dnsmasq-dns" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.617468 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="56022ebb-4c18-4c79-a7e5-14081da5735d" containerName="dnsmasq-dns" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.617972 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-p47cm" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.631417 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-p47cm"] Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.706785 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdnjd\" (UniqueName: \"kubernetes.io/projected/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51-kube-api-access-bdnjd\") pod \"glance-db-create-p47cm\" (UID: \"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51\") " pod="openstack/glance-db-create-p47cm" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.808234 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdnjd\" (UniqueName: \"kubernetes.io/projected/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51-kube-api-access-bdnjd\") pod \"glance-db-create-p47cm\" (UID: \"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51\") " pod="openstack/glance-db-create-p47cm" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.856454 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdnjd\" (UniqueName: \"kubernetes.io/projected/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51-kube-api-access-bdnjd\") pod \"glance-db-create-p47cm\" (UID: \"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51\") " pod="openstack/glance-db-create-p47cm" Oct 02 21:41:44 crc kubenswrapper[4636]: I1002 21:41:44.946171 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-p47cm" Oct 02 21:41:45 crc kubenswrapper[4636]: I1002 21:41:45.423305 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-p47cm"] Oct 02 21:41:46 crc kubenswrapper[4636]: I1002 21:41:46.274387 4636 generic.go:334] "Generic (PLEG): container finished" podID="8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51" containerID="9a4e3d8c8c2f66e0e4f98a9960e2f41c3e51934298856b32328857618c25a64b" exitCode=0 Oct 02 21:41:46 crc kubenswrapper[4636]: I1002 21:41:46.274496 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p47cm" event={"ID":"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51","Type":"ContainerDied","Data":"9a4e3d8c8c2f66e0e4f98a9960e2f41c3e51934298856b32328857618c25a64b"} Oct 02 21:41:46 crc kubenswrapper[4636]: I1002 21:41:46.274713 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p47cm" event={"ID":"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51","Type":"ContainerStarted","Data":"5a9ad60433595f121c78878911d0c5abf333534596e92fdb32a9140647ca18e3"} Oct 02 21:41:47 crc kubenswrapper[4636]: I1002 21:41:47.575908 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-p47cm" Oct 02 21:41:47 crc kubenswrapper[4636]: I1002 21:41:47.660848 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdnjd\" (UniqueName: \"kubernetes.io/projected/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51-kube-api-access-bdnjd\") pod \"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51\" (UID: \"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51\") " Oct 02 21:41:47 crc kubenswrapper[4636]: I1002 21:41:47.682266 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51-kube-api-access-bdnjd" (OuterVolumeSpecName: "kube-api-access-bdnjd") pod "8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51" (UID: "8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51"). 
InnerVolumeSpecName "kube-api-access-bdnjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:47 crc kubenswrapper[4636]: I1002 21:41:47.763109 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdnjd\" (UniqueName: \"kubernetes.io/projected/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51-kube-api-access-bdnjd\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.291131 4636 generic.go:334] "Generic (PLEG): container finished" podID="07626226-e803-4c29-a34d-9acea829a26b" containerID="a766c61a9e954dbf7b9583bf73f07007d9de65b1d272ebf7f9455db92f5d48ac" exitCode=0 Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.291689 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hnbwh" event={"ID":"07626226-e803-4c29-a34d-9acea829a26b","Type":"ContainerDied","Data":"a766c61a9e954dbf7b9583bf73f07007d9de65b1d272ebf7f9455db92f5d48ac"} Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.293851 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p47cm" event={"ID":"8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51","Type":"ContainerDied","Data":"5a9ad60433595f121c78878911d0c5abf333534596e92fdb32a9140647ca18e3"} Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.293886 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a9ad60433595f121c78878911d0c5abf333534596e92fdb32a9140647ca18e3" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.294076 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-p47cm" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.677289 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.689940 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e9e8a5c2-1a89-4fb3-93d5-877930afc11d-etc-swift\") pod \"swift-storage-0\" (UID: \"e9e8a5c2-1a89-4fb3-93d5-877930afc11d\") " pod="openstack/swift-storage-0" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.896948 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.918567 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-hspb6"] Oct 02 21:41:48 crc kubenswrapper[4636]: E1002 21:41:48.919603 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51" containerName="mariadb-database-create" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.919633 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51" containerName="mariadb-database-create" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.919924 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51" containerName="mariadb-database-create" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.920686 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.940450 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-hspb6"] Oct 02 21:41:48 crc kubenswrapper[4636]: I1002 21:41:48.981643 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn98f\" (UniqueName: \"kubernetes.io/projected/6de6aa77-aec2-4d73-8bfb-3aed342368e3-kube-api-access-mn98f\") pod \"keystone-db-create-hspb6\" (UID: \"6de6aa77-aec2-4d73-8bfb-3aed342368e3\") " pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.083261 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn98f\" (UniqueName: \"kubernetes.io/projected/6de6aa77-aec2-4d73-8bfb-3aed342368e3-kube-api-access-mn98f\") pod \"keystone-db-create-hspb6\" (UID: \"6de6aa77-aec2-4d73-8bfb-3aed342368e3\") " pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.102587 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn98f\" (UniqueName: \"kubernetes.io/projected/6de6aa77-aec2-4d73-8bfb-3aed342368e3-kube-api-access-mn98f\") pod \"keystone-db-create-hspb6\" (UID: \"6de6aa77-aec2-4d73-8bfb-3aed342368e3\") " pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.278411 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-g29vt"] Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.280052 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-g29vt" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.286407 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-g29vt"] Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.289108 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.387962 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx82c\" (UniqueName: \"kubernetes.io/projected/0000f181-00c3-43d4-aead-d064250a0099-kube-api-access-tx82c\") pod \"placement-db-create-g29vt\" (UID: \"0000f181-00c3-43d4-aead-d064250a0099\") " pod="openstack/placement-db-create-g29vt" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.459729 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 02 21:41:49 crc kubenswrapper[4636]: W1002 21:41:49.484211 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9e8a5c2_1a89_4fb3_93d5_877930afc11d.slice/crio-f2fe7582ee0c134268e028c2092e6b6757342069198ab3119ed6251319a86b42 WatchSource:0}: Error finding container f2fe7582ee0c134268e028c2092e6b6757342069198ab3119ed6251319a86b42: Status 404 returned error can't find the container with id f2fe7582ee0c134268e028c2092e6b6757342069198ab3119ed6251319a86b42 Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.489833 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx82c\" (UniqueName: \"kubernetes.io/projected/0000f181-00c3-43d4-aead-d064250a0099-kube-api-access-tx82c\") pod \"placement-db-create-g29vt\" (UID: \"0000f181-00c3-43d4-aead-d064250a0099\") " pod="openstack/placement-db-create-g29vt" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.508478 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx82c\" (UniqueName: \"kubernetes.io/projected/0000f181-00c3-43d4-aead-d064250a0099-kube-api-access-tx82c\") pod \"placement-db-create-g29vt\" (UID: \"0000f181-00c3-43d4-aead-d064250a0099\") " pod="openstack/placement-db-create-g29vt" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.603965 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-g29vt" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.617875 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.691957 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-scripts\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.692290 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-swiftconf\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.692381 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-combined-ca-bundle\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.692408 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhz69\" (UniqueName: \"kubernetes.io/projected/07626226-e803-4c29-a34d-9acea829a26b-kube-api-access-vhz69\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.692466 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-ring-data-devices\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.692532 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07626226-e803-4c29-a34d-9acea829a26b-etc-swift\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.692568 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-dispersionconf\") pod \"07626226-e803-4c29-a34d-9acea829a26b\" (UID: \"07626226-e803-4c29-a34d-9acea829a26b\") " Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.696711 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.719918 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07626226-e803-4c29-a34d-9acea829a26b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.728946 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-scripts" (OuterVolumeSpecName: "scripts") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.734786 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07626226-e803-4c29-a34d-9acea829a26b-kube-api-access-vhz69" (OuterVolumeSpecName: "kube-api-access-vhz69") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "kube-api-access-vhz69". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.738575 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.746227 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.752095 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "07626226-e803-4c29-a34d-9acea829a26b" (UID: "07626226-e803-4c29-a34d-9acea829a26b"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.786888 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-hspb6"] Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793887 4636 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793913 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793923 4636 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793933 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07626226-e803-4c29-a34d-9acea829a26b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793942 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhz69\" (UniqueName: \"kubernetes.io/projected/07626226-e803-4c29-a34d-9acea829a26b-kube-api-access-vhz69\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793950 4636 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07626226-e803-4c29-a34d-9acea829a26b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.793958 4636 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07626226-e803-4c29-a34d-9acea829a26b-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:49 crc kubenswrapper[4636]: I1002 21:41:49.799211 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-b5dv2" podUID="189fcf5f-fafd-4af5-9b02-d8d33b6bfe65" containerName="ovn-controller" probeResult="failure" output=< Oct 02 21:41:49 crc kubenswrapper[4636]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 02 21:41:49 crc kubenswrapper[4636]: > Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.047199 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-g29vt"] Oct 02 21:41:50 crc kubenswrapper[4636]: W1002 21:41:50.054257 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0000f181_00c3_43d4_aead_d064250a0099.slice/crio-7511d880d111e53d4377816d822576a7633c3aa9e30f3d8f6d44781ad587a5ae WatchSource:0}: Error finding container 7511d880d111e53d4377816d822576a7633c3aa9e30f3d8f6d44781ad587a5ae: Status 404 returned error can't find the container with id 7511d880d111e53d4377816d822576a7633c3aa9e30f3d8f6d44781ad587a5ae Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.315078 4636 generic.go:334] "Generic (PLEG): container finished" podID="0000f181-00c3-43d4-aead-d064250a0099" containerID="a47c4b54de34c13631f67bc8eaea0fd22d84829568527c0d680f1bd8b324af3e" exitCode=0 Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.315623 4636 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-g29vt" event={"ID":"0000f181-00c3-43d4-aead-d064250a0099","Type":"ContainerDied","Data":"a47c4b54de34c13631f67bc8eaea0fd22d84829568527c0d680f1bd8b324af3e"} Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.315684 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-g29vt" event={"ID":"0000f181-00c3-43d4-aead-d064250a0099","Type":"ContainerStarted","Data":"7511d880d111e53d4377816d822576a7633c3aa9e30f3d8f6d44781ad587a5ae"} Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.320542 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"f2fe7582ee0c134268e028c2092e6b6757342069198ab3119ed6251319a86b42"} Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.322531 4636 generic.go:334] "Generic (PLEG): container finished" podID="6de6aa77-aec2-4d73-8bfb-3aed342368e3" containerID="185139f1ef47be28d7af755b107c10df3c76f9cdefb00e14bb91a80113e151c9" exitCode=0 Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.322617 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-hspb6" event={"ID":"6de6aa77-aec2-4d73-8bfb-3aed342368e3","Type":"ContainerDied","Data":"185139f1ef47be28d7af755b107c10df3c76f9cdefb00e14bb91a80113e151c9"} Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.322675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-hspb6" event={"ID":"6de6aa77-aec2-4d73-8bfb-3aed342368e3","Type":"ContainerStarted","Data":"0d96baac029148498a7ca5993cb23c212fb7eb1c00deb62c0cb5b5d85afeec70"} Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.324211 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hnbwh" event={"ID":"07626226-e803-4c29-a34d-9acea829a26b","Type":"ContainerDied","Data":"a6871884aa9cd582db4af6fe430e43181dc3824a79da2ce8f40ec223fc7be3a8"} Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.324236 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6871884aa9cd582db4af6fe430e43181dc3824a79da2ce8f40ec223fc7be3a8" Oct 02 21:41:50 crc kubenswrapper[4636]: I1002 21:41:50.324308 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hnbwh" Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.333126 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"ad0670c204492c0d445da0fd01e580758926927acac041c266e33eb45dd375f0"} Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.333828 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"ad444b5dd7a6cfe5ed3828621f98bcaf0ec16e9a2b8dcd2e9f20de18e04a0696"} Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.333846 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"94f9884dff8ddcd18ed2df1496b03321ce845896ca4dcad534e52da6b297700d"} Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.333858 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"045ea796b8dd920740b64f425fe5fbc3c1f0a34d1ee38c6dc55ec0e94bee36cc"} Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.663829 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.722580 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn98f\" (UniqueName: \"kubernetes.io/projected/6de6aa77-aec2-4d73-8bfb-3aed342368e3-kube-api-access-mn98f\") pod \"6de6aa77-aec2-4d73-8bfb-3aed342368e3\" (UID: \"6de6aa77-aec2-4d73-8bfb-3aed342368e3\") " Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.729268 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6de6aa77-aec2-4d73-8bfb-3aed342368e3-kube-api-access-mn98f" (OuterVolumeSpecName: "kube-api-access-mn98f") pod "6de6aa77-aec2-4d73-8bfb-3aed342368e3" (UID: "6de6aa77-aec2-4d73-8bfb-3aed342368e3"). InnerVolumeSpecName "kube-api-access-mn98f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.816676 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-g29vt" Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.824291 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn98f\" (UniqueName: \"kubernetes.io/projected/6de6aa77-aec2-4d73-8bfb-3aed342368e3-kube-api-access-mn98f\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.925466 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx82c\" (UniqueName: \"kubernetes.io/projected/0000f181-00c3-43d4-aead-d064250a0099-kube-api-access-tx82c\") pod \"0000f181-00c3-43d4-aead-d064250a0099\" (UID: \"0000f181-00c3-43d4-aead-d064250a0099\") " Oct 02 21:41:51 crc kubenswrapper[4636]: I1002 21:41:51.942279 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0000f181-00c3-43d4-aead-d064250a0099-kube-api-access-tx82c" (OuterVolumeSpecName: "kube-api-access-tx82c") pod "0000f181-00c3-43d4-aead-d064250a0099" (UID: "0000f181-00c3-43d4-aead-d064250a0099"). InnerVolumeSpecName "kube-api-access-tx82c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.027924 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx82c\" (UniqueName: \"kubernetes.io/projected/0000f181-00c3-43d4-aead-d064250a0099-kube-api-access-tx82c\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.342942 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-hspb6" event={"ID":"6de6aa77-aec2-4d73-8bfb-3aed342368e3","Type":"ContainerDied","Data":"0d96baac029148498a7ca5993cb23c212fb7eb1c00deb62c0cb5b5d85afeec70"} Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.342987 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d96baac029148498a7ca5993cb23c212fb7eb1c00deb62c0cb5b5d85afeec70" Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.343003 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-hspb6" Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.346923 4636 generic.go:334] "Generic (PLEG): container finished" podID="780601d4-af7f-47ee-b580-939d5531e805" containerID="1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a" exitCode=0 Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.347010 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"780601d4-af7f-47ee-b580-939d5531e805","Type":"ContainerDied","Data":"1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a"} Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.349851 4636 generic.go:334] "Generic (PLEG): container finished" podID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerID="0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7" exitCode=0 Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.349979 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9","Type":"ContainerDied","Data":"0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7"} Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.358079 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-g29vt" event={"ID":"0000f181-00c3-43d4-aead-d064250a0099","Type":"ContainerDied","Data":"7511d880d111e53d4377816d822576a7633c3aa9e30f3d8f6d44781ad587a5ae"} Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.358111 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7511d880d111e53d4377816d822576a7633c3aa9e30f3d8f6d44781ad587a5ae" Oct 02 21:41:52 crc kubenswrapper[4636]: I1002 21:41:52.358179 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-g29vt" Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.117074 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.117391 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.369151 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"780601d4-af7f-47ee-b580-939d5531e805","Type":"ContainerStarted","Data":"ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd"} Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.369676 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.372517 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9","Type":"ContainerStarted","Data":"60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff"} Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.373205 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.384178 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"bcc8b355acd78184fbd7dd3ba81924f82ce369f9dc94ea4ef068a343dd0e76d7"} Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.384208 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"26c6e181e53569dc809c38763ffd51b4f500ea872c78d6f661c03db9f4b9dccb"} Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.384219 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"5e777c4379f314c4e496752994cf444519d3423f774ec95498d5a0128e348e5d"} Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.384228 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"91d710e803d68c6093f28972faf89b3a0daf72eaaab78a8d77c6f77756c2b657"} Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.415091 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=53.277342329 podStartE2EDuration="1m8.41507056s" podCreationTimestamp="2025-10-02 21:40:45 +0000 UTC" firstStartedPulling="2025-10-02 21:41:00.960247205 +0000 UTC m=+1052.283255224" lastFinishedPulling="2025-10-02 21:41:16.097975406 +0000 UTC m=+1067.420983455" observedRunningTime="2025-10-02 21:41:53.403344337 +0000 UTC m=+1104.726352356" watchObservedRunningTime="2025-10-02 
21:41:53.41507056 +0000 UTC m=+1104.738078589" Oct 02 21:41:53 crc kubenswrapper[4636]: I1002 21:41:53.442697 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=52.698706478 podStartE2EDuration="1m9.442683237s" podCreationTimestamp="2025-10-02 21:40:44 +0000 UTC" firstStartedPulling="2025-10-02 21:40:59.354036908 +0000 UTC m=+1050.677044947" lastFinishedPulling="2025-10-02 21:41:16.098013657 +0000 UTC m=+1067.421021706" observedRunningTime="2025-10-02 21:41:53.434602582 +0000 UTC m=+1104.757610601" watchObservedRunningTime="2025-10-02 21:41:53.442683237 +0000 UTC m=+1104.765691256" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.397209 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"2feecac1fcf16841380bd3b0588dfba5b12eba5d0836ac3d41577942f67db528"} Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.683186 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-c6ae-account-create-x7vrb"] Oct 02 21:41:54 crc kubenswrapper[4636]: E1002 21:41:54.684091 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de6aa77-aec2-4d73-8bfb-3aed342368e3" containerName="mariadb-database-create" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.684195 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de6aa77-aec2-4d73-8bfb-3aed342368e3" containerName="mariadb-database-create" Oct 02 21:41:54 crc kubenswrapper[4636]: E1002 21:41:54.684302 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0000f181-00c3-43d4-aead-d064250a0099" containerName="mariadb-database-create" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.684387 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="0000f181-00c3-43d4-aead-d064250a0099" containerName="mariadb-database-create" Oct 02 21:41:54 crc kubenswrapper[4636]: E1002 21:41:54.684471 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07626226-e803-4c29-a34d-9acea829a26b" containerName="swift-ring-rebalance" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.684551 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="07626226-e803-4c29-a34d-9acea829a26b" containerName="swift-ring-rebalance" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.684846 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6de6aa77-aec2-4d73-8bfb-3aed342368e3" containerName="mariadb-database-create" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.685515 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="07626226-e803-4c29-a34d-9acea829a26b" containerName="swift-ring-rebalance" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.687372 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="0000f181-00c3-43d4-aead-d064250a0099" containerName="mariadb-database-create" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.688418 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.690277 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.692888 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c6ae-account-create-x7vrb"] Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.777554 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzld6\" (UniqueName: \"kubernetes.io/projected/e3eecdab-0886-4fa2-8e2b-29a5894cca5d-kube-api-access-nzld6\") pod \"glance-c6ae-account-create-x7vrb\" (UID: \"e3eecdab-0886-4fa2-8e2b-29a5894cca5d\") " pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.799252 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-b5dv2" podUID="189fcf5f-fafd-4af5-9b02-d8d33b6bfe65" containerName="ovn-controller" probeResult="failure" output=< Oct 02 21:41:54 crc kubenswrapper[4636]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 02 21:41:54 crc kubenswrapper[4636]: > Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.879334 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzld6\" (UniqueName: \"kubernetes.io/projected/e3eecdab-0886-4fa2-8e2b-29a5894cca5d-kube-api-access-nzld6\") pod \"glance-c6ae-account-create-x7vrb\" (UID: \"e3eecdab-0886-4fa2-8e2b-29a5894cca5d\") " pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.901494 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzld6\" (UniqueName: \"kubernetes.io/projected/e3eecdab-0886-4fa2-8e2b-29a5894cca5d-kube-api-access-nzld6\") pod \"glance-c6ae-account-create-x7vrb\" (UID: \"e3eecdab-0886-4fa2-8e2b-29a5894cca5d\") " pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.910314 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:41:54 crc kubenswrapper[4636]: I1002 21:41:54.936734 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ffvpv" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.070231 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.173661 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-b5dv2-config-nzn66"] Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.180150 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.183022 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.190033 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b5dv2-config-nzn66"] Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.292279 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.292535 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run-ovn\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.292568 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-log-ovn\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.292599 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7hsq\" (UniqueName: \"kubernetes.io/projected/b0117508-24ee-448f-9708-62ace19d1efa-kube-api-access-n7hsq\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.292614 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-additional-scripts\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.292659 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-scripts\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.410825 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-log-ovn\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.410947 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7hsq\" (UniqueName: 
\"kubernetes.io/projected/b0117508-24ee-448f-9708-62ace19d1efa-kube-api-access-n7hsq\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.410983 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-additional-scripts\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.411123 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-scripts\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.411293 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.411338 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run-ovn\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.411666 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run-ovn\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.411719 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-log-ovn\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.412564 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-additional-scripts\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.412621 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.427538 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-scripts\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.441825 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7hsq\" (UniqueName: \"kubernetes.io/projected/b0117508-24ee-448f-9708-62ace19d1efa-kube-api-access-n7hsq\") pod \"ovn-controller-b5dv2-config-nzn66\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.509919 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"209855e5a00d0e72a781ffbddb116ad70d45ccf90e551f475ce9e6adaa235527"} Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.510155 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"f6d694087d0da68abb54764278541671bef004a4216301dead0ea28256edd097"} Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.510165 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"e33ebb44b31088c4370f80f282a2c61ee1f056d1bdefe2da671075d1f7b036c1"} Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.510173 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"e5a04c7cf5ee5ce2d1b82b1760636aa7289c2f22a0306836b3e9c0263ee37ca0"} Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.510181 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"abc520033669e473010ca3927cc990825d8a049187e5597fe2fe70667d1fecb8"} Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.535330 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:55 crc kubenswrapper[4636]: I1002 21:41:55.540307 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c6ae-account-create-x7vrb"] Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.081544 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b5dv2-config-nzn66"] Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.518527 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b5dv2-config-nzn66" event={"ID":"b0117508-24ee-448f-9708-62ace19d1efa","Type":"ContainerStarted","Data":"b46ff96ed1eda16567b38e61b3d8548936dfa92e2b86f859d0d44bc82bb6025c"} Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.518835 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b5dv2-config-nzn66" event={"ID":"b0117508-24ee-448f-9708-62ace19d1efa","Type":"ContainerStarted","Data":"ff893519f5f672c02fd508a0fda3b4635f9016a408f0f36f8cc14769596bdaeb"} Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.523731 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e9e8a5c2-1a89-4fb3-93d5-877930afc11d","Type":"ContainerStarted","Data":"cf191b6db51c0305328bb9c84fc95b123532029e427b48748d53587f5e65cc18"} Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.526481 4636 generic.go:334] "Generic (PLEG): container finished" podID="e3eecdab-0886-4fa2-8e2b-29a5894cca5d" containerID="13bb5e8e03cf1edf800c443ee030a7b7f7bfa19bae6b0242e1737552ab945c28" exitCode=0 Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.526518 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c6ae-account-create-x7vrb" event={"ID":"e3eecdab-0886-4fa2-8e2b-29a5894cca5d","Type":"ContainerDied","Data":"13bb5e8e03cf1edf800c443ee030a7b7f7bfa19bae6b0242e1737552ab945c28"} Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.526540 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c6ae-account-create-x7vrb" event={"ID":"e3eecdab-0886-4fa2-8e2b-29a5894cca5d","Type":"ContainerStarted","Data":"56e921419edc542b38993184dbfe1cfdb5919eccfb35cb375a93b62746d84964"} Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.541509 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-b5dv2-config-nzn66" podStartSLOduration=1.5414944099999999 podStartE2EDuration="1.54149441s" podCreationTimestamp="2025-10-02 21:41:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:41:56.537888963 +0000 UTC m=+1107.860896982" watchObservedRunningTime="2025-10-02 21:41:56.54149441 +0000 UTC m=+1107.864502429" Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.889203 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=21.329963607 podStartE2EDuration="25.889187532s" podCreationTimestamp="2025-10-02 21:41:31 +0000 UTC" firstStartedPulling="2025-10-02 21:41:49.487786107 +0000 UTC m=+1100.810794126" lastFinishedPulling="2025-10-02 21:41:54.047010032 +0000 UTC m=+1105.370018051" observedRunningTime="2025-10-02 21:41:56.594537386 +0000 UTC m=+1107.917545405" watchObservedRunningTime="2025-10-02 21:41:56.889187532 +0000 UTC m=+1108.212195551" Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.892576 4636 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-77585f5f8c-t4n68"] Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.893863 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.903440 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 02 21:41:56 crc kubenswrapper[4636]: I1002 21:41:56.947607 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-t4n68"] Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.074286 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbbpz\" (UniqueName: \"kubernetes.io/projected/d0d73ce4-4765-4ef0-82bd-e07875e04521-kube-api-access-lbbpz\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.074670 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.074714 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.074763 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-config\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.074965 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.075050 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.176157 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.176246 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-lbbpz\" (UniqueName: \"kubernetes.io/projected/d0d73ce4-4765-4ef0-82bd-e07875e04521-kube-api-access-lbbpz\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.176283 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.176343 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.176408 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-config\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.176466 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.177368 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.177397 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.177408 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-config\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.177604 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.177723 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-swift-storage-0\") pod 
\"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.201764 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbbpz\" (UniqueName: \"kubernetes.io/projected/d0d73ce4-4765-4ef0-82bd-e07875e04521-kube-api-access-lbbpz\") pod \"dnsmasq-dns-77585f5f8c-t4n68\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") " pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.208567 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.536710 4636 generic.go:334] "Generic (PLEG): container finished" podID="b0117508-24ee-448f-9708-62ace19d1efa" containerID="b46ff96ed1eda16567b38e61b3d8548936dfa92e2b86f859d0d44bc82bb6025c" exitCode=0 Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.536848 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b5dv2-config-nzn66" event={"ID":"b0117508-24ee-448f-9708-62ace19d1efa","Type":"ContainerDied","Data":"b46ff96ed1eda16567b38e61b3d8548936dfa92e2b86f859d0d44bc82bb6025c"} Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.673317 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-t4n68"] Oct 02 21:41:57 crc kubenswrapper[4636]: W1002 21:41:57.682505 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0d73ce4_4765_4ef0_82bd_e07875e04521.slice/crio-8909feb3260e80512ecda1fa583061a61b7a7dcfd80e90e7df15c4f28249d573 WatchSource:0}: Error finding container 8909feb3260e80512ecda1fa583061a61b7a7dcfd80e90e7df15c4f28249d573: Status 404 returned error can't find the container with id 8909feb3260e80512ecda1fa583061a61b7a7dcfd80e90e7df15c4f28249d573 Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.838696 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.991932 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzld6\" (UniqueName: \"kubernetes.io/projected/e3eecdab-0886-4fa2-8e2b-29a5894cca5d-kube-api-access-nzld6\") pod \"e3eecdab-0886-4fa2-8e2b-29a5894cca5d\" (UID: \"e3eecdab-0886-4fa2-8e2b-29a5894cca5d\") " Oct 02 21:41:57 crc kubenswrapper[4636]: I1002 21:41:57.996073 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3eecdab-0886-4fa2-8e2b-29a5894cca5d-kube-api-access-nzld6" (OuterVolumeSpecName: "kube-api-access-nzld6") pod "e3eecdab-0886-4fa2-8e2b-29a5894cca5d" (UID: "e3eecdab-0886-4fa2-8e2b-29a5894cca5d"). InnerVolumeSpecName "kube-api-access-nzld6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.095027 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzld6\" (UniqueName: \"kubernetes.io/projected/e3eecdab-0886-4fa2-8e2b-29a5894cca5d-kube-api-access-nzld6\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.553258 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c6ae-account-create-x7vrb" Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.553878 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c6ae-account-create-x7vrb" event={"ID":"e3eecdab-0886-4fa2-8e2b-29a5894cca5d","Type":"ContainerDied","Data":"56e921419edc542b38993184dbfe1cfdb5919eccfb35cb375a93b62746d84964"} Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.553925 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56e921419edc542b38993184dbfe1cfdb5919eccfb35cb375a93b62746d84964" Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.558832 4636 generic.go:334] "Generic (PLEG): container finished" podID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerID="a8063e9aa43ef67d600ce77d28d191addf4c478fdab74576bb346b8dc876ae01" exitCode=0 Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.558940 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" event={"ID":"d0d73ce4-4765-4ef0-82bd-e07875e04521","Type":"ContainerDied","Data":"a8063e9aa43ef67d600ce77d28d191addf4c478fdab74576bb346b8dc876ae01"} Oct 02 21:41:58 crc kubenswrapper[4636]: I1002 21:41:58.558975 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" event={"ID":"d0d73ce4-4765-4ef0-82bd-e07875e04521","Type":"ContainerStarted","Data":"8909feb3260e80512ecda1fa583061a61b7a7dcfd80e90e7df15c4f28249d573"} Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.039259 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.081061 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9ede-account-create-8c85t"] Oct 02 21:41:59 crc kubenswrapper[4636]: E1002 21:41:59.081408 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eecdab-0886-4fa2-8e2b-29a5894cca5d" containerName="mariadb-account-create" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.081425 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3eecdab-0886-4fa2-8e2b-29a5894cca5d" containerName="mariadb-account-create" Oct 02 21:41:59 crc kubenswrapper[4636]: E1002 21:41:59.081435 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0117508-24ee-448f-9708-62ace19d1efa" containerName="ovn-config" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.081441 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0117508-24ee-448f-9708-62ace19d1efa" containerName="ovn-config" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.081627 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0117508-24ee-448f-9708-62ace19d1efa" containerName="ovn-config" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.081644 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3eecdab-0886-4fa2-8e2b-29a5894cca5d" containerName="mariadb-account-create" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.082270 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.085288 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.091418 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9ede-account-create-8c85t"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216319 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-scripts\") pod \"b0117508-24ee-448f-9708-62ace19d1efa\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216436 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-log-ovn\") pod \"b0117508-24ee-448f-9708-62ace19d1efa\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216464 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run\") pod \"b0117508-24ee-448f-9708-62ace19d1efa\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216481 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7hsq\" (UniqueName: \"kubernetes.io/projected/b0117508-24ee-448f-9708-62ace19d1efa-kube-api-access-n7hsq\") pod \"b0117508-24ee-448f-9708-62ace19d1efa\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216515 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run-ovn\") pod \"b0117508-24ee-448f-9708-62ace19d1efa\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216595 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-additional-scripts\") pod \"b0117508-24ee-448f-9708-62ace19d1efa\" (UID: \"b0117508-24ee-448f-9708-62ace19d1efa\") " Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.216866 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v92mz\" (UniqueName: \"kubernetes.io/projected/d3946d1d-bde7-4a87-995b-090a8d334f3e-kube-api-access-v92mz\") pod \"keystone-9ede-account-create-8c85t\" (UID: \"d3946d1d-bde7-4a87-995b-090a8d334f3e\") " pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.217111 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run" (OuterVolumeSpecName: "var-run") pod "b0117508-24ee-448f-9708-62ace19d1efa" (UID: "b0117508-24ee-448f-9708-62ace19d1efa"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.217227 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "b0117508-24ee-448f-9708-62ace19d1efa" (UID: "b0117508-24ee-448f-9708-62ace19d1efa"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.217756 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "b0117508-24ee-448f-9708-62ace19d1efa" (UID: "b0117508-24ee-448f-9708-62ace19d1efa"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.217893 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "b0117508-24ee-448f-9708-62ace19d1efa" (UID: "b0117508-24ee-448f-9708-62ace19d1efa"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.217982 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-scripts" (OuterVolumeSpecName: "scripts") pod "b0117508-24ee-448f-9708-62ace19d1efa" (UID: "b0117508-24ee-448f-9708-62ace19d1efa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.222508 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0117508-24ee-448f-9708-62ace19d1efa-kube-api-access-n7hsq" (OuterVolumeSpecName: "kube-api-access-n7hsq") pod "b0117508-24ee-448f-9708-62ace19d1efa" (UID: "b0117508-24ee-448f-9708-62ace19d1efa"). InnerVolumeSpecName "kube-api-access-n7hsq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318661 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v92mz\" (UniqueName: \"kubernetes.io/projected/d3946d1d-bde7-4a87-995b-090a8d334f3e-kube-api-access-v92mz\") pod \"keystone-9ede-account-create-8c85t\" (UID: \"d3946d1d-bde7-4a87-995b-090a8d334f3e\") " pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318859 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318875 4636 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318886 4636 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318899 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7hsq\" (UniqueName: \"kubernetes.io/projected/b0117508-24ee-448f-9708-62ace19d1efa-kube-api-access-n7hsq\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318911 4636 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0117508-24ee-448f-9708-62ace19d1efa-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.318921 4636 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b0117508-24ee-448f-9708-62ace19d1efa-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.338422 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v92mz\" (UniqueName: \"kubernetes.io/projected/d3946d1d-bde7-4a87-995b-090a8d334f3e-kube-api-access-v92mz\") pod \"keystone-9ede-account-create-8c85t\" (UID: \"d3946d1d-bde7-4a87-995b-090a8d334f3e\") " pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.404876 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.409667 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b879-account-create-2k959"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.410646 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b879-account-create-2k959" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.417365 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.426480 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b879-account-create-2k959"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.528407 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nspwk\" (UniqueName: \"kubernetes.io/projected/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389-kube-api-access-nspwk\") pod \"placement-b879-account-create-2k959\" (UID: \"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389\") " pod="openstack/placement-b879-account-create-2k959" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.570482 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" event={"ID":"d0d73ce4-4765-4ef0-82bd-e07875e04521","Type":"ContainerStarted","Data":"f0ca5ffde1b510078fe1d9abb2180a0e8ee1ac0a46c7a84e2216e2d9ac6a0765"} Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.570922 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.572872 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b5dv2-config-nzn66" event={"ID":"b0117508-24ee-448f-9708-62ace19d1efa","Type":"ContainerDied","Data":"ff893519f5f672c02fd508a0fda3b4635f9016a408f0f36f8cc14769596bdaeb"} Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.572904 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff893519f5f672c02fd508a0fda3b4635f9016a408f0f36f8cc14769596bdaeb" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.572955 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b5dv2-config-nzn66" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.607535 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" podStartSLOduration=3.607519576 podStartE2EDuration="3.607519576s" podCreationTimestamp="2025-10-02 21:41:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:41:59.604884696 +0000 UTC m=+1110.927892705" watchObservedRunningTime="2025-10-02 21:41:59.607519576 +0000 UTC m=+1110.930527605" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.631922 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nspwk\" (UniqueName: \"kubernetes.io/projected/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389-kube-api-access-nspwk\") pod \"placement-b879-account-create-2k959\" (UID: \"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389\") " pod="openstack/placement-b879-account-create-2k959" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.660548 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nspwk\" (UniqueName: \"kubernetes.io/projected/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389-kube-api-access-nspwk\") pod \"placement-b879-account-create-2k959\" (UID: \"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389\") " pod="openstack/placement-b879-account-create-2k959" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.691170 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-b5dv2-config-nzn66"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.696993 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-b5dv2-config-nzn66"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.798621 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9ede-account-create-8c85t"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.801069 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-b5dv2" Oct 02 21:41:59 crc kubenswrapper[4636]: W1002 21:41:59.807311 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3946d1d_bde7_4a87_995b_090a8d334f3e.slice/crio-eee4cc4f437367cc0811337229386c8be810cf9686f3827d142e2c60921f9058 WatchSource:0}: Error finding container eee4cc4f437367cc0811337229386c8be810cf9686f3827d142e2c60921f9058: Status 404 returned error can't find the container with id eee4cc4f437367cc0811337229386c8be810cf9686f3827d142e2c60921f9058 Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.815798 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b879-account-create-2k959" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.923865 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-qgkxv"] Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.924760 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-qgkxv" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.929562 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f8hzc" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.938032 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 02 21:41:59 crc kubenswrapper[4636]: I1002 21:41:59.957459 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-qgkxv"] Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.037409 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-config-data\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.037458 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-combined-ca-bundle\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.037519 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-db-sync-config-data\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.037553 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4znw\" (UniqueName: \"kubernetes.io/projected/4f7bc59e-3c13-4a51-9494-d45734d6c70c-kube-api-access-w4znw\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.138525 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-config-data\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.138712 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-combined-ca-bundle\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.138790 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-db-sync-config-data\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.138825 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4znw\" (UniqueName: \"kubernetes.io/projected/4f7bc59e-3c13-4a51-9494-d45734d6c70c-kube-api-access-w4znw\") pod 
\"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.146441 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-db-sync-config-data\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.149102 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-config-data\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.162784 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-combined-ca-bundle\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.175519 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4znw\" (UniqueName: \"kubernetes.io/projected/4f7bc59e-3c13-4a51-9494-d45734d6c70c-kube-api-access-w4znw\") pod \"glance-db-sync-qgkxv\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") " pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.250111 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.487561 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b879-account-create-2k959"] Oct 02 21:42:00 crc kubenswrapper[4636]: W1002 21:42:00.489651 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7c0f3df_7c2a_4ee3_8aa2_04dc2b2b8389.slice/crio-e35647ca21adbe96c863af91113713a1f006f8a5f33d7adba7b0c73b898e9e81 WatchSource:0}: Error finding container e35647ca21adbe96c863af91113713a1f006f8a5f33d7adba7b0c73b898e9e81: Status 404 returned error can't find the container with id e35647ca21adbe96c863af91113713a1f006f8a5f33d7adba7b0c73b898e9e81 Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.583716 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b879-account-create-2k959" event={"ID":"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389","Type":"ContainerStarted","Data":"e35647ca21adbe96c863af91113713a1f006f8a5f33d7adba7b0c73b898e9e81"} Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.587987 4636 generic.go:334] "Generic (PLEG): container finished" podID="d3946d1d-bde7-4a87-995b-090a8d334f3e" containerID="518ecb3cc6381d451823ee69e3aaf95454ff825101bad8d3008f25b0a482ca75" exitCode=0 Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.588031 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9ede-account-create-8c85t" event={"ID":"d3946d1d-bde7-4a87-995b-090a8d334f3e","Type":"ContainerDied","Data":"518ecb3cc6381d451823ee69e3aaf95454ff825101bad8d3008f25b0a482ca75"} Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.588074 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9ede-account-create-8c85t" 
event={"ID":"d3946d1d-bde7-4a87-995b-090a8d334f3e","Type":"ContainerStarted","Data":"eee4cc4f437367cc0811337229386c8be810cf9686f3827d142e2c60921f9058"} Oct 02 21:42:00 crc kubenswrapper[4636]: I1002 21:42:00.682554 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-qgkxv"] Oct 02 21:42:00 crc kubenswrapper[4636]: W1002 21:42:00.692158 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f7bc59e_3c13_4a51_9494_d45734d6c70c.slice/crio-2260e9c53e016c46c979e254cad13142e4490fa281de5634276f82d67fe34c5f WatchSource:0}: Error finding container 2260e9c53e016c46c979e254cad13142e4490fa281de5634276f82d67fe34c5f: Status 404 returned error can't find the container with id 2260e9c53e016c46c979e254cad13142e4490fa281de5634276f82d67fe34c5f Oct 02 21:42:01 crc kubenswrapper[4636]: I1002 21:42:01.598142 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qgkxv" event={"ID":"4f7bc59e-3c13-4a51-9494-d45734d6c70c","Type":"ContainerStarted","Data":"2260e9c53e016c46c979e254cad13142e4490fa281de5634276f82d67fe34c5f"} Oct 02 21:42:01 crc kubenswrapper[4636]: I1002 21:42:01.600627 4636 generic.go:334] "Generic (PLEG): container finished" podID="b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389" containerID="f5e34c92b6228decccc445d6577c52c3362e34ac837de2e2b31811c1c5beb30b" exitCode=0 Oct 02 21:42:01 crc kubenswrapper[4636]: I1002 21:42:01.600737 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b879-account-create-2k959" event={"ID":"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389","Type":"ContainerDied","Data":"f5e34c92b6228decccc445d6577c52c3362e34ac837de2e2b31811c1c5beb30b"} Oct 02 21:42:01 crc kubenswrapper[4636]: I1002 21:42:01.619612 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0117508-24ee-448f-9708-62ace19d1efa" path="/var/lib/kubelet/pods/b0117508-24ee-448f-9708-62ace19d1efa/volumes" Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.077867 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.169068 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v92mz\" (UniqueName: \"kubernetes.io/projected/d3946d1d-bde7-4a87-995b-090a8d334f3e-kube-api-access-v92mz\") pod \"d3946d1d-bde7-4a87-995b-090a8d334f3e\" (UID: \"d3946d1d-bde7-4a87-995b-090a8d334f3e\") " Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.193402 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3946d1d-bde7-4a87-995b-090a8d334f3e-kube-api-access-v92mz" (OuterVolumeSpecName: "kube-api-access-v92mz") pod "d3946d1d-bde7-4a87-995b-090a8d334f3e" (UID: "d3946d1d-bde7-4a87-995b-090a8d334f3e"). InnerVolumeSpecName "kube-api-access-v92mz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.271096 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v92mz\" (UniqueName: \"kubernetes.io/projected/d3946d1d-bde7-4a87-995b-090a8d334f3e-kube-api-access-v92mz\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.611061 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9ede-account-create-8c85t" event={"ID":"d3946d1d-bde7-4a87-995b-090a8d334f3e","Type":"ContainerDied","Data":"eee4cc4f437367cc0811337229386c8be810cf9686f3827d142e2c60921f9058"} Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.611394 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eee4cc4f437367cc0811337229386c8be810cf9686f3827d142e2c60921f9058" Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.611116 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9ede-account-create-8c85t" Oct 02 21:42:02 crc kubenswrapper[4636]: I1002 21:42:02.927494 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b879-account-create-2k959" Oct 02 21:42:03 crc kubenswrapper[4636]: I1002 21:42:03.083637 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nspwk\" (UniqueName: \"kubernetes.io/projected/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389-kube-api-access-nspwk\") pod \"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389\" (UID: \"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389\") " Oct 02 21:42:03 crc kubenswrapper[4636]: I1002 21:42:03.106866 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389-kube-api-access-nspwk" (OuterVolumeSpecName: "kube-api-access-nspwk") pod "b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389" (UID: "b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389"). InnerVolumeSpecName "kube-api-access-nspwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:03 crc kubenswrapper[4636]: I1002 21:42:03.185938 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nspwk\" (UniqueName: \"kubernetes.io/projected/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389-kube-api-access-nspwk\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:03 crc kubenswrapper[4636]: I1002 21:42:03.620907 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b879-account-create-2k959" event={"ID":"b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389","Type":"ContainerDied","Data":"e35647ca21adbe96c863af91113713a1f006f8a5f33d7adba7b0c73b898e9e81"} Oct 02 21:42:03 crc kubenswrapper[4636]: I1002 21:42:03.621177 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e35647ca21adbe96c863af91113713a1f006f8a5f33d7adba7b0c73b898e9e81" Oct 02 21:42:03 crc kubenswrapper[4636]: I1002 21:42:03.620949 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b879-account-create-2k959" Oct 02 21:42:06 crc kubenswrapper[4636]: I1002 21:42:06.154931 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:42:06 crc kubenswrapper[4636]: I1002 21:42:06.643566 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 02 21:42:07 crc kubenswrapper[4636]: I1002 21:42:07.210873 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:42:07 crc kubenswrapper[4636]: I1002 21:42:07.276319 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-ndhlh"] Oct 02 21:42:07 crc kubenswrapper[4636]: I1002 21:42:07.276554 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-ndhlh" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="dnsmasq-dns" containerID="cri-o://49b2eb00b7529cf6b18cc90875079588f9396d4010e7058cdf2de8fcdaa31f1a" gracePeriod=10 Oct 02 21:42:07 crc kubenswrapper[4636]: I1002 21:42:07.654043 4636 generic.go:334] "Generic (PLEG): container finished" podID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerID="49b2eb00b7529cf6b18cc90875079588f9396d4010e7058cdf2de8fcdaa31f1a" exitCode=0 Oct 02 21:42:07 crc kubenswrapper[4636]: I1002 21:42:07.654082 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-ndhlh" event={"ID":"6eecc450-86a9-4d48-957d-5c8972c14bc6","Type":"ContainerDied","Data":"49b2eb00b7529cf6b18cc90875079588f9396d4010e7058cdf2de8fcdaa31f1a"} Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.276878 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-f96c4"] Oct 02 21:42:08 crc kubenswrapper[4636]: E1002 21:42:08.277222 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389" containerName="mariadb-account-create" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.277238 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389" containerName="mariadb-account-create" Oct 02 21:42:08 crc kubenswrapper[4636]: E1002 21:42:08.277257 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3946d1d-bde7-4a87-995b-090a8d334f3e" containerName="mariadb-account-create" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.277264 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3946d1d-bde7-4a87-995b-090a8d334f3e" containerName="mariadb-account-create" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.277470 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3946d1d-bde7-4a87-995b-090a8d334f3e" containerName="mariadb-account-create" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.277489 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389" containerName="mariadb-account-create" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.278049 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.300637 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-f96c4"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.366246 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swcj2\" (UniqueName: \"kubernetes.io/projected/8eef44f3-49f5-40c6-b160-a72304983227-kube-api-access-swcj2\") pod \"cinder-db-create-f96c4\" (UID: \"8eef44f3-49f5-40c6-b160-a72304983227\") " pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.401783 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-zfgnq"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.402871 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.419203 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-zfgnq"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.473490 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swcj2\" (UniqueName: \"kubernetes.io/projected/8eef44f3-49f5-40c6-b160-a72304983227-kube-api-access-swcj2\") pod \"cinder-db-create-f96c4\" (UID: \"8eef44f3-49f5-40c6-b160-a72304983227\") " pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.532488 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swcj2\" (UniqueName: \"kubernetes.io/projected/8eef44f3-49f5-40c6-b160-a72304983227-kube-api-access-swcj2\") pod \"cinder-db-create-f96c4\" (UID: \"8eef44f3-49f5-40c6-b160-a72304983227\") " pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.575188 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd29l\" (UniqueName: \"kubernetes.io/projected/7bb5e7d3-f883-4823-b53f-4fc2abd1e716-kube-api-access-sd29l\") pod \"barbican-db-create-zfgnq\" (UID: \"7bb5e7d3-f883-4823-b53f-4fc2abd1e716\") " pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.596020 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.674087 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-wzgzt"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.675256 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.676863 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd29l\" (UniqueName: \"kubernetes.io/projected/7bb5e7d3-f883-4823-b53f-4fc2abd1e716-kube-api-access-sd29l\") pod \"barbican-db-create-zfgnq\" (UID: \"7bb5e7d3-f883-4823-b53f-4fc2abd1e716\") " pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.698975 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-wzgzt"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.738524 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qdzwb"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.739533 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.742479 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd29l\" (UniqueName: \"kubernetes.io/projected/7bb5e7d3-f883-4823-b53f-4fc2abd1e716-kube-api-access-sd29l\") pod \"barbican-db-create-zfgnq\" (UID: \"7bb5e7d3-f883-4823-b53f-4fc2abd1e716\") " pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.752491 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.752692 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jrtvb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.752834 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.753009 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.776223 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qdzwb"] Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.781240 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7kqn\" (UniqueName: \"kubernetes.io/projected/a4a90155-455b-4a0f-b579-98d7a261db7b-kube-api-access-k7kqn\") pod \"neutron-db-create-wzgzt\" (UID: \"a4a90155-455b-4a0f-b579-98d7a261db7b\") " pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.882930 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw4fk\" (UniqueName: \"kubernetes.io/projected/928272a1-ab4f-42e0-9fa1-64228b77271f-kube-api-access-vw4fk\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.882979 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7kqn\" (UniqueName: \"kubernetes.io/projected/a4a90155-455b-4a0f-b579-98d7a261db7b-kube-api-access-k7kqn\") pod \"neutron-db-create-wzgzt\" (UID: \"a4a90155-455b-4a0f-b579-98d7a261db7b\") " pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.883020 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-config-data\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.883056 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-combined-ca-bundle\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.915545 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7kqn\" (UniqueName: \"kubernetes.io/projected/a4a90155-455b-4a0f-b579-98d7a261db7b-kube-api-access-k7kqn\") pod \"neutron-db-create-wzgzt\" (UID: \"a4a90155-455b-4a0f-b579-98d7a261db7b\") " pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.984180 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-combined-ca-bundle\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.984287 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw4fk\" (UniqueName: \"kubernetes.io/projected/928272a1-ab4f-42e0-9fa1-64228b77271f-kube-api-access-vw4fk\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.984341 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-config-data\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.989805 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-config-data\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.997052 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:08 crc kubenswrapper[4636]: I1002 21:42:08.998346 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-combined-ca-bundle\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:09 crc kubenswrapper[4636]: I1002 21:42:09.007303 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw4fk\" (UniqueName: \"kubernetes.io/projected/928272a1-ab4f-42e0-9fa1-64228b77271f-kube-api-access-vw4fk\") pod \"keystone-db-sync-qdzwb\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:09 crc kubenswrapper[4636]: I1002 21:42:09.028041 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:09 crc kubenswrapper[4636]: I1002 21:42:09.105545 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:11 crc kubenswrapper[4636]: I1002 21:42:11.878676 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-ndhlh" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Oct 02 21:42:16 crc kubenswrapper[4636]: E1002 21:42:16.338460 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Oct 02 21:42:16 crc kubenswrapper[4636]: E1002 21:42:16.338938 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w4znw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-qgkxv_openstack(4f7bc59e-3c13-4a51-9494-d45734d6c70c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:42:16 crc kubenswrapper[4636]: E1002 21:42:16.340157 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-qgkxv" podUID="4f7bc59e-3c13-4a51-9494-d45734d6c70c" Oct 02 21:42:16 crc kubenswrapper[4636]: 
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.731388 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-ndhlh"
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.731611 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-ndhlh" event={"ID":"6eecc450-86a9-4d48-957d-5c8972c14bc6","Type":"ContainerDied","Data":"8d8aa8c7ed70ee7ed9b624dc05a57d8164d8566a5beb2b36d83216e2f41d8b0b"}
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.731655 4636 scope.go:117] "RemoveContainer" containerID="49b2eb00b7529cf6b18cc90875079588f9396d4010e7058cdf2de8fcdaa31f1a"
Oct 02 21:42:16 crc kubenswrapper[4636]: E1002 21:42:16.734091 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-qgkxv" podUID="4f7bc59e-3c13-4a51-9494-d45734d6c70c"
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.775453 4636 scope.go:117] "RemoveContainer" containerID="da55a27041a2a076b4960065059118856e8be8b3c31c39d1dd538dfdac8a3a7b"
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.822783 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-dns-svc\") pod \"6eecc450-86a9-4d48-957d-5c8972c14bc6\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") "
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.822825 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgjsn\" (UniqueName: \"kubernetes.io/projected/6eecc450-86a9-4d48-957d-5c8972c14bc6-kube-api-access-kgjsn\") pod \"6eecc450-86a9-4d48-957d-5c8972c14bc6\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") "
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.822881 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-config\") pod \"6eecc450-86a9-4d48-957d-5c8972c14bc6\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") "
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.822912 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-sb\") pod \"6eecc450-86a9-4d48-957d-5c8972c14bc6\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") "
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.822964 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-nb\") pod \"6eecc450-86a9-4d48-957d-5c8972c14bc6\" (UID: \"6eecc450-86a9-4d48-957d-5c8972c14bc6\") "
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.836341 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eecc450-86a9-4d48-957d-5c8972c14bc6-kube-api-access-kgjsn" (OuterVolumeSpecName: "kube-api-access-kgjsn") pod "6eecc450-86a9-4d48-957d-5c8972c14bc6" (UID: "6eecc450-86a9-4d48-957d-5c8972c14bc6"). InnerVolumeSpecName "kube-api-access-kgjsn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.877515 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-zfgnq"]
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.884456 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6eecc450-86a9-4d48-957d-5c8972c14bc6" (UID: "6eecc450-86a9-4d48-957d-5c8972c14bc6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:16 crc kubenswrapper[4636]: W1002 21:42:16.889512 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bb5e7d3_f883_4823_b53f_4fc2abd1e716.slice/crio-4b57821526f6db3369d5f444b6a23a5c51212354e612d7267daea6d33ce73f39 WatchSource:0}: Error finding container 4b57821526f6db3369d5f444b6a23a5c51212354e612d7267daea6d33ce73f39: Status 404 returned error can't find the container with id 4b57821526f6db3369d5f444b6a23a5c51212354e612d7267daea6d33ce73f39
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.894974 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-f96c4"]
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.899444 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6eecc450-86a9-4d48-957d-5c8972c14bc6" (UID: "6eecc450-86a9-4d48-957d-5c8972c14bc6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.899688 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qdzwb"]
Oct 02 21:42:16 crc kubenswrapper[4636]: W1002 21:42:16.900341 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8eef44f3_49f5_40c6_b160_a72304983227.slice/crio-de3b309e08f7c66d4a6a5189b4283e1048ac8569cb6b589e255b693eb18b3ded WatchSource:0}: Error finding container de3b309e08f7c66d4a6a5189b4283e1048ac8569cb6b589e255b693eb18b3ded: Status 404 returned error can't find the container with id de3b309e08f7c66d4a6a5189b4283e1048ac8569cb6b589e255b693eb18b3ded
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.908296 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-config" (OuterVolumeSpecName: "config") pod "6eecc450-86a9-4d48-957d-5c8972c14bc6" (UID: "6eecc450-86a9-4d48-957d-5c8972c14bc6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.909227 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6eecc450-86a9-4d48-957d-5c8972c14bc6" (UID: "6eecc450-86a9-4d48-957d-5c8972c14bc6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.929379 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.929412 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgjsn\" (UniqueName: \"kubernetes.io/projected/6eecc450-86a9-4d48-957d-5c8972c14bc6-kube-api-access-kgjsn\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.929424 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.929432 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.929443 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eecc450-86a9-4d48-957d-5c8972c14bc6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:16 crc kubenswrapper[4636]: I1002 21:42:16.988702 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-wzgzt"] Oct 02 21:42:17 crc kubenswrapper[4636]: W1002 21:42:17.010786 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4a90155_455b_4a0f_b579_98d7a261db7b.slice/crio-870f318c9673c4779f84e1fe6c09fc9e6f88eed4da75ac62ed2a2a126b75efcc WatchSource:0}: Error finding container 870f318c9673c4779f84e1fe6c09fc9e6f88eed4da75ac62ed2a2a126b75efcc: Status 404 returned error can't find the container with id 870f318c9673c4779f84e1fe6c09fc9e6f88eed4da75ac62ed2a2a126b75efcc Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.070284 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-ndhlh"] Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.076194 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-ndhlh"] Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.612615 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" path="/var/lib/kubelet/pods/6eecc450-86a9-4d48-957d-5c8972c14bc6/volumes" Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.739510 4636 generic.go:334] "Generic (PLEG): container finished" podID="a4a90155-455b-4a0f-b579-98d7a261db7b" containerID="06b2fc9f60d0d941269d5abd3a4041dd72c08360ffa03a7e4178ea8e99f2025a" exitCode=0 Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.739602 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wzgzt" event={"ID":"a4a90155-455b-4a0f-b579-98d7a261db7b","Type":"ContainerDied","Data":"06b2fc9f60d0d941269d5abd3a4041dd72c08360ffa03a7e4178ea8e99f2025a"} Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.739631 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wzgzt" event={"ID":"a4a90155-455b-4a0f-b579-98d7a261db7b","Type":"ContainerStarted","Data":"870f318c9673c4779f84e1fe6c09fc9e6f88eed4da75ac62ed2a2a126b75efcc"} Oct 02 21:42:17 crc 
kubenswrapper[4636]: I1002 21:42:17.742337 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qdzwb" event={"ID":"928272a1-ab4f-42e0-9fa1-64228b77271f","Type":"ContainerStarted","Data":"341c86e63e66c09d8cc6e8f63357a6cb67df9714076ca8a54f7bb20afbd09bac"} Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.744307 4636 generic.go:334] "Generic (PLEG): container finished" podID="8eef44f3-49f5-40c6-b160-a72304983227" containerID="89309f63b4977f58f0aad4acb842e8d4b758f25aa59d05b6f836880482b59440" exitCode=0 Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.744400 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f96c4" event={"ID":"8eef44f3-49f5-40c6-b160-a72304983227","Type":"ContainerDied","Data":"89309f63b4977f58f0aad4acb842e8d4b758f25aa59d05b6f836880482b59440"} Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.744435 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f96c4" event={"ID":"8eef44f3-49f5-40c6-b160-a72304983227","Type":"ContainerStarted","Data":"de3b309e08f7c66d4a6a5189b4283e1048ac8569cb6b589e255b693eb18b3ded"} Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.745953 4636 generic.go:334] "Generic (PLEG): container finished" podID="7bb5e7d3-f883-4823-b53f-4fc2abd1e716" containerID="03dddc28db964c86107328fbde314727a15bc6a5c6f25bb6566323c5d48c2629" exitCode=0 Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.745979 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zfgnq" event={"ID":"7bb5e7d3-f883-4823-b53f-4fc2abd1e716","Type":"ContainerDied","Data":"03dddc28db964c86107328fbde314727a15bc6a5c6f25bb6566323c5d48c2629"} Oct 02 21:42:17 crc kubenswrapper[4636]: I1002 21:42:17.745994 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zfgnq" event={"ID":"7bb5e7d3-f883-4823-b53f-4fc2abd1e716","Type":"ContainerStarted","Data":"4b57821526f6db3369d5f444b6a23a5c51212354e612d7267daea6d33ce73f39"} Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.357962 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.358270 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.395174 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.497581 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7kqn\" (UniqueName: \"kubernetes.io/projected/a4a90155-455b-4a0f-b579-98d7a261db7b-kube-api-access-k7kqn\") pod \"a4a90155-455b-4a0f-b579-98d7a261db7b\" (UID: \"a4a90155-455b-4a0f-b579-98d7a261db7b\") " Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.497623 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd29l\" (UniqueName: \"kubernetes.io/projected/7bb5e7d3-f883-4823-b53f-4fc2abd1e716-kube-api-access-sd29l\") pod \"7bb5e7d3-f883-4823-b53f-4fc2abd1e716\" (UID: \"7bb5e7d3-f883-4823-b53f-4fc2abd1e716\") " Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.497695 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swcj2\" (UniqueName: \"kubernetes.io/projected/8eef44f3-49f5-40c6-b160-a72304983227-kube-api-access-swcj2\") pod \"8eef44f3-49f5-40c6-b160-a72304983227\" (UID: \"8eef44f3-49f5-40c6-b160-a72304983227\") " Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.505218 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb5e7d3-f883-4823-b53f-4fc2abd1e716-kube-api-access-sd29l" (OuterVolumeSpecName: "kube-api-access-sd29l") pod "7bb5e7d3-f883-4823-b53f-4fc2abd1e716" (UID: "7bb5e7d3-f883-4823-b53f-4fc2abd1e716"). InnerVolumeSpecName "kube-api-access-sd29l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.505273 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eef44f3-49f5-40c6-b160-a72304983227-kube-api-access-swcj2" (OuterVolumeSpecName: "kube-api-access-swcj2") pod "8eef44f3-49f5-40c6-b160-a72304983227" (UID: "8eef44f3-49f5-40c6-b160-a72304983227"). InnerVolumeSpecName "kube-api-access-swcj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.512347 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4a90155-455b-4a0f-b579-98d7a261db7b-kube-api-access-k7kqn" (OuterVolumeSpecName: "kube-api-access-k7kqn") pod "a4a90155-455b-4a0f-b579-98d7a261db7b" (UID: "a4a90155-455b-4a0f-b579-98d7a261db7b"). InnerVolumeSpecName "kube-api-access-k7kqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.599843 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7kqn\" (UniqueName: \"kubernetes.io/projected/a4a90155-455b-4a0f-b579-98d7a261db7b-kube-api-access-k7kqn\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.600560 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd29l\" (UniqueName: \"kubernetes.io/projected/7bb5e7d3-f883-4823-b53f-4fc2abd1e716-kube-api-access-sd29l\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.600576 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swcj2\" (UniqueName: \"kubernetes.io/projected/8eef44f3-49f5-40c6-b160-a72304983227-kube-api-access-swcj2\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.783159 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wzgzt" event={"ID":"a4a90155-455b-4a0f-b579-98d7a261db7b","Type":"ContainerDied","Data":"870f318c9673c4779f84e1fe6c09fc9e6f88eed4da75ac62ed2a2a126b75efcc"} Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.783225 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="870f318c9673c4779f84e1fe6c09fc9e6f88eed4da75ac62ed2a2a126b75efcc" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.783286 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wzgzt" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.785478 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f96c4" event={"ID":"8eef44f3-49f5-40c6-b160-a72304983227","Type":"ContainerDied","Data":"de3b309e08f7c66d4a6a5189b4283e1048ac8569cb6b589e255b693eb18b3ded"} Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.785508 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de3b309e08f7c66d4a6a5189b4283e1048ac8569cb6b589e255b693eb18b3ded" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.785576 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-f96c4" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.792388 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zfgnq" event={"ID":"7bb5e7d3-f883-4823-b53f-4fc2abd1e716","Type":"ContainerDied","Data":"4b57821526f6db3369d5f444b6a23a5c51212354e612d7267daea6d33ce73f39"} Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.792439 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b57821526f6db3369d5f444b6a23a5c51212354e612d7267daea6d33ce73f39" Oct 02 21:42:21 crc kubenswrapper[4636]: I1002 21:42:21.792571 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-zfgnq" Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.118117 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.118283 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.118378 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.119566 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"04eab656ee8c7baf8b67922349c9645220b7ee9b1e0b335d95acde2eb540ab5f"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.119650 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://04eab656ee8c7baf8b67922349c9645220b7ee9b1e0b335d95acde2eb540ab5f" gracePeriod=600 Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.813929 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qdzwb" event={"ID":"928272a1-ab4f-42e0-9fa1-64228b77271f","Type":"ContainerStarted","Data":"b9eaab03bcb68309b8cdd662f894fa051226141d46fba51f25ff62d2ba2cb015"} Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.828942 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="04eab656ee8c7baf8b67922349c9645220b7ee9b1e0b335d95acde2eb540ab5f" exitCode=0 Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.828984 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"04eab656ee8c7baf8b67922349c9645220b7ee9b1e0b335d95acde2eb540ab5f"} Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.829022 4636 scope.go:117] "RemoveContainer" containerID="9f0c7c91411fb3c1501fae8b5053b828ebe95e83f4048c87988a5b7f03a27fd8" Oct 02 21:42:23 crc kubenswrapper[4636]: I1002 21:42:23.851273 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-qdzwb" podStartSLOduration=11.565279503 podStartE2EDuration="15.851254919s" podCreationTimestamp="2025-10-02 21:42:08 +0000 UTC" firstStartedPulling="2025-10-02 21:42:16.915550999 +0000 UTC m=+1128.238559018" lastFinishedPulling="2025-10-02 21:42:21.201526415 +0000 UTC m=+1132.524534434" observedRunningTime="2025-10-02 21:42:23.85090822 +0000 UTC m=+1135.173916249" watchObservedRunningTime="2025-10-02 21:42:23.851254919 +0000 UTC m=+1135.174262948" Oct 02 
21:42:24 crc kubenswrapper[4636]: I1002 21:42:24.843042 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"8eca633a881fe1c5c0ea771b3040511454688e9d05e626e17792bacf3c7ae736"} Oct 02 21:42:26 crc kubenswrapper[4636]: I1002 21:42:26.862559 4636 generic.go:334] "Generic (PLEG): container finished" podID="928272a1-ab4f-42e0-9fa1-64228b77271f" containerID="b9eaab03bcb68309b8cdd662f894fa051226141d46fba51f25ff62d2ba2cb015" exitCode=0 Oct 02 21:42:26 crc kubenswrapper[4636]: I1002 21:42:26.862646 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qdzwb" event={"ID":"928272a1-ab4f-42e0-9fa1-64228b77271f","Type":"ContainerDied","Data":"b9eaab03bcb68309b8cdd662f894fa051226141d46fba51f25ff62d2ba2cb015"} Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.186445 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.314177 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw4fk\" (UniqueName: \"kubernetes.io/projected/928272a1-ab4f-42e0-9fa1-64228b77271f-kube-api-access-vw4fk\") pod \"928272a1-ab4f-42e0-9fa1-64228b77271f\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.314322 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-config-data\") pod \"928272a1-ab4f-42e0-9fa1-64228b77271f\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.314371 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-combined-ca-bundle\") pod \"928272a1-ab4f-42e0-9fa1-64228b77271f\" (UID: \"928272a1-ab4f-42e0-9fa1-64228b77271f\") " Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.319646 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/928272a1-ab4f-42e0-9fa1-64228b77271f-kube-api-access-vw4fk" (OuterVolumeSpecName: "kube-api-access-vw4fk") pod "928272a1-ab4f-42e0-9fa1-64228b77271f" (UID: "928272a1-ab4f-42e0-9fa1-64228b77271f"). InnerVolumeSpecName "kube-api-access-vw4fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.345523 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "928272a1-ab4f-42e0-9fa1-64228b77271f" (UID: "928272a1-ab4f-42e0-9fa1-64228b77271f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.374468 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-config-data" (OuterVolumeSpecName: "config-data") pod "928272a1-ab4f-42e0-9fa1-64228b77271f" (UID: "928272a1-ab4f-42e0-9fa1-64228b77271f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.416067 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.416095 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928272a1-ab4f-42e0-9fa1-64228b77271f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.416106 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw4fk\" (UniqueName: \"kubernetes.io/projected/928272a1-ab4f-42e0-9fa1-64228b77271f-kube-api-access-vw4fk\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.498892 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1ea2-account-create-hkf65"] Oct 02 21:42:28 crc kubenswrapper[4636]: E1002 21:42:28.499249 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="dnsmasq-dns" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499268 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="dnsmasq-dns" Oct 02 21:42:28 crc kubenswrapper[4636]: E1002 21:42:28.499282 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eef44f3-49f5-40c6-b160-a72304983227" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499291 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eef44f3-49f5-40c6-b160-a72304983227" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: E1002 21:42:28.499302 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="init" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499312 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="init" Oct 02 21:42:28 crc kubenswrapper[4636]: E1002 21:42:28.499343 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="928272a1-ab4f-42e0-9fa1-64228b77271f" containerName="keystone-db-sync" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499350 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="928272a1-ab4f-42e0-9fa1-64228b77271f" containerName="keystone-db-sync" Oct 02 21:42:28 crc kubenswrapper[4636]: E1002 21:42:28.499371 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4a90155-455b-4a0f-b579-98d7a261db7b" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499380 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4a90155-455b-4a0f-b579-98d7a261db7b" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: E1002 21:42:28.499398 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bb5e7d3-f883-4823-b53f-4fc2abd1e716" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499406 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bb5e7d3-f883-4823-b53f-4fc2abd1e716" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499615 4636 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="928272a1-ab4f-42e0-9fa1-64228b77271f" containerName="keystone-db-sync" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499639 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eecc450-86a9-4d48-957d-5c8972c14bc6" containerName="dnsmasq-dns" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499663 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4a90155-455b-4a0f-b579-98d7a261db7b" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499673 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bb5e7d3-f883-4823-b53f-4fc2abd1e716" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.499687 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eef44f3-49f5-40c6-b160-a72304983227" containerName="mariadb-database-create" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.500349 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.502438 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.509541 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1ea2-account-create-hkf65"] Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.521167 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plwtv\" (UniqueName: \"kubernetes.io/projected/ac128b50-2394-48cd-85c8-8dd21d7faca5-kube-api-access-plwtv\") pod \"cinder-1ea2-account-create-hkf65\" (UID: \"ac128b50-2394-48cd-85c8-8dd21d7faca5\") " pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.622323 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plwtv\" (UniqueName: \"kubernetes.io/projected/ac128b50-2394-48cd-85c8-8dd21d7faca5-kube-api-access-plwtv\") pod \"cinder-1ea2-account-create-hkf65\" (UID: \"ac128b50-2394-48cd-85c8-8dd21d7faca5\") " pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.651238 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plwtv\" (UniqueName: \"kubernetes.io/projected/ac128b50-2394-48cd-85c8-8dd21d7faca5-kube-api-access-plwtv\") pod \"cinder-1ea2-account-create-hkf65\" (UID: \"ac128b50-2394-48cd-85c8-8dd21d7faca5\") " pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.680935 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2a44-account-create-t8qgl"] Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.681977 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.684186 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.695612 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2a44-account-create-t8qgl"] Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.783369 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-3a73-account-create-btbnb"] Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.784577 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.787177 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.793324 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3a73-account-create-btbnb"] Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.816271 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.828837 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhkrl\" (UniqueName: \"kubernetes.io/projected/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d-kube-api-access-lhkrl\") pod \"neutron-3a73-account-create-btbnb\" (UID: \"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d\") " pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.828889 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v55pv\" (UniqueName: \"kubernetes.io/projected/4dc3b476-645a-40d8-a4ba-293d7b39acf9-kube-api-access-v55pv\") pod \"barbican-2a44-account-create-t8qgl\" (UID: \"4dc3b476-645a-40d8-a4ba-293d7b39acf9\") " pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.889872 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qdzwb" event={"ID":"928272a1-ab4f-42e0-9fa1-64228b77271f","Type":"ContainerDied","Data":"341c86e63e66c09d8cc6e8f63357a6cb67df9714076ca8a54f7bb20afbd09bac"} Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.889907 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="341c86e63e66c09d8cc6e8f63357a6cb67df9714076ca8a54f7bb20afbd09bac" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.889977 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qdzwb" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.968703 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhkrl\" (UniqueName: \"kubernetes.io/projected/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d-kube-api-access-lhkrl\") pod \"neutron-3a73-account-create-btbnb\" (UID: \"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d\") " pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.969065 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v55pv\" (UniqueName: \"kubernetes.io/projected/4dc3b476-645a-40d8-a4ba-293d7b39acf9-kube-api-access-v55pv\") pod \"barbican-2a44-account-create-t8qgl\" (UID: \"4dc3b476-645a-40d8-a4ba-293d7b39acf9\") " pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:28 crc kubenswrapper[4636]: I1002 21:42:28.986347 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v55pv\" (UniqueName: \"kubernetes.io/projected/4dc3b476-645a-40d8-a4ba-293d7b39acf9-kube-api-access-v55pv\") pod \"barbican-2a44-account-create-t8qgl\" (UID: \"4dc3b476-645a-40d8-a4ba-293d7b39acf9\") " pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.002485 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.004561 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhkrl\" (UniqueName: \"kubernetes.io/projected/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d-kube-api-access-lhkrl\") pod \"neutron-3a73-account-create-btbnb\" (UID: \"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d\") " pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.104177 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.108994 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-x9q29"] Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.110018 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.115438 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-x9q29"] Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.123163 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.123347 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.123509 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jrtvb" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.123613 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.191330 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-gtwtl"] Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.192724 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.215123 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-gtwtl"] Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.281649 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-credential-keys\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.281992 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-config-data\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.282023 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-combined-ca-bundle\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.282072 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwx2d\" (UniqueName: \"kubernetes.io/projected/cd94e659-8a47-4277-af97-2f3b9799c738-kube-api-access-qwx2d\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.282094 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-scripts\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.282147 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-fernet-keys\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.339790 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-59849968b5-h7qdr"] Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.342025 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.349503 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.349682 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-9vlxx" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.349833 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.349955 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 02 21:42:29 crc kubenswrapper[4636]: I1002 21:42:29.377430 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-59849968b5-h7qdr"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390260 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-svc\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390298 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390326 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-config\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390351 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-config-data\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390495 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fplr\" (UniqueName: \"kubernetes.io/projected/f461d675-4ff1-45c7-b32e-10763620f9c0-kube-api-access-5fplr\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390556 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-fernet-keys\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390586 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-sb\") 
pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390618 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-scripts\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390654 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-horizon-secret-key\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390676 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-logs\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390866 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-credential-keys\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390929 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-config-data\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.390999 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.391018 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4p7q\" (UniqueName: \"kubernetes.io/projected/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-kube-api-access-n4p7q\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.391052 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-combined-ca-bundle\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.391077 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwx2d\" (UniqueName: \"kubernetes.io/projected/cd94e659-8a47-4277-af97-2f3b9799c738-kube-api-access-qwx2d\") pod 
\"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.391108 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-scripts\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.414467 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-credential-keys\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.429289 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwx2d\" (UniqueName: \"kubernetes.io/projected/cd94e659-8a47-4277-af97-2f3b9799c738-kube-api-access-qwx2d\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.434278 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-fernet-keys\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.484292 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-scripts\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.488550 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1ea2-account-create-hkf65"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.491626 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-combined-ca-bundle\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492190 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492210 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4p7q\" (UniqueName: \"kubernetes.io/projected/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-kube-api-access-n4p7q\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492262 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-svc\") pod 
\"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492285 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492321 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-config\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492344 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-config-data\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492368 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fplr\" (UniqueName: \"kubernetes.io/projected/f461d675-4ff1-45c7-b32e-10763620f9c0-kube-api-access-5fplr\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492394 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492417 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-scripts\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492445 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-horizon-secret-key\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492466 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-logs\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492861 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-config-data\") pod \"keystone-bootstrap-x9q29\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " pod="openstack/keystone-bootstrap-x9q29" Oct 02 
21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.492876 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-logs\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.493410 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.493682 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-config\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.494191 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.494493 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.494983 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-svc\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.500221 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.502700 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.530897 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.531781 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.532363 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.532453 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.533011 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.534607 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-config-data\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.535805 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-scripts\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.538468 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-horizon-secret-key\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.544316 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fplr\" (UniqueName: \"kubernetes.io/projected/f461d675-4ff1-45c7-b32e-10763620f9c0-kube-api-access-5fplr\") pod \"dnsmasq-dns-55fff446b9-gtwtl\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.593402 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.601332 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.673498 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4p7q\" (UniqueName: \"kubernetes.io/projected/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-kube-api-access-n4p7q\") pod \"horizon-59849968b5-h7qdr\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.745630 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jrtvb" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.746697 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.763740 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-9vlxx" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.764149 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.782079 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.808960 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5b45f78fcc-6f48n"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.818651 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.837817 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-gtwtl"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844677 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844809 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-log-httpd\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844829 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-config-data\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844859 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9xxz\" (UniqueName: \"kubernetes.io/projected/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-kube-api-access-b9xxz\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844890 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-run-httpd\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844912 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.844941 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-scripts\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.885246 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-cf9ld"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.897533 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.901037 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rz4bg" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.901124 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.901315 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.936055 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b45f78fcc-6f48n"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.944469 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1ea2-account-create-hkf65" event={"ID":"ac128b50-2394-48cd-85c8-8dd21d7faca5","Type":"ContainerStarted","Data":"1518114cb792bdc71c3c7b51a1d5db6ce6e583a58b586cc35f3211e9a7d14283"} Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.944535 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-cf9ld"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946025 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-log-httpd\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946076 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-config-data\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946124 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9xxz\" (UniqueName: \"kubernetes.io/projected/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-kube-api-access-b9xxz\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946165 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-logs\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946186 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-run-httpd\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946213 4636 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946228 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-config-data\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946346 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-scripts\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946394 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946425 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-horizon-secret-key\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946483 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-scripts\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.946501 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgwzn\" (UniqueName: \"kubernetes.io/projected/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-kube-api-access-tgwzn\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.950904 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-log-httpd\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.953653 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-run-httpd\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.955393 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-6d2jz"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.958411 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.960615 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-config-data\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.964533 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.971566 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-scripts\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:29.976590 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-6d2jz"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.000343 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.006085 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9xxz\" (UniqueName: \"kubernetes.io/projected/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-kube-api-access-b9xxz\") pod \"ceilometer-0\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.050719 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-scripts\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.051055 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-horizon-secret-key\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.051089 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-585ls\" (UniqueName: \"kubernetes.io/projected/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-kube-api-access-585ls\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052074 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " 
pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052123 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-scripts\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052141 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgwzn\" (UniqueName: \"kubernetes.io/projected/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-kube-api-access-tgwzn\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052161 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-logs\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052217 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-config-data\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052237 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-config\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052267 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-logs\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052285 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052304 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052328 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 
crc kubenswrapper[4636]: I1002 21:42:30.052347 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-config-data\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052396 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-combined-ca-bundle\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.052415 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlcmx\" (UniqueName: \"kubernetes.io/projected/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-kube-api-access-mlcmx\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.053625 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-config-data\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.056050 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-scripts\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.057134 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-logs\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.057276 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-horizon-secret-key\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.076356 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgwzn\" (UniqueName: \"kubernetes.io/projected/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-kube-api-access-tgwzn\") pod \"horizon-5b45f78fcc-6f48n\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") " pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.153904 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-logs\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.153977 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-config-data\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.153999 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-config\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154028 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154046 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154067 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154111 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-combined-ca-bundle\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154127 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlcmx\" (UniqueName: \"kubernetes.io/projected/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-kube-api-access-mlcmx\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154142 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-scripts\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154168 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-585ls\" (UniqueName: \"kubernetes.io/projected/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-kube-api-access-585ls\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.154184 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-swift-storage-0\") pod 
\"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.155008 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.155949 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.156667 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-config\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.156726 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-logs\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.157441 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.165867 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-combined-ca-bundle\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.167922 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.171339 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-scripts\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.171868 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-config-data\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.181377 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mlcmx\" (UniqueName: \"kubernetes.io/projected/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-kube-api-access-mlcmx\") pod \"placement-db-sync-cf9ld\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.194021 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-585ls\" (UniqueName: \"kubernetes.io/projected/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-kube-api-access-585ls\") pod \"dnsmasq-dns-76fcf4b695-6d2jz\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.214585 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.214661 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.239174 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cf9ld" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.310948 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.934239 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2a44-account-create-t8qgl"] Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.939684 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.953401 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qgkxv" event={"ID":"4f7bc59e-3c13-4a51-9494-d45734d6c70c","Type":"ContainerStarted","Data":"b562cf48f873543cebea199ecdc361a425df054bff5946872490509fe7c68916"} Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.957762 4636 generic.go:334] "Generic (PLEG): container finished" podID="ac128b50-2394-48cd-85c8-8dd21d7faca5" containerID="544fdf35921ef685679cdff969b03ce3801a0c4b324d781f1abbdd79aba66d96" exitCode=0 Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.957813 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1ea2-account-create-hkf65" event={"ID":"ac128b50-2394-48cd-85c8-8dd21d7faca5","Type":"ContainerDied","Data":"544fdf35921ef685679cdff969b03ce3801a0c4b324d781f1abbdd79aba66d96"} Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.962159 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2a44-account-create-t8qgl" event={"ID":"4dc3b476-645a-40d8-a4ba-293d7b39acf9","Type":"ContainerStarted","Data":"a37cec080d8c0d6bc7808dbb6b5d16adf5cc7fe79aacfccbc839b15524542b4a"} Oct 02 21:42:30 crc kubenswrapper[4636]: I1002 21:42:30.972418 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-qgkxv" podStartSLOduration=3.117873509 podStartE2EDuration="31.97240521s" podCreationTimestamp="2025-10-02 21:41:59 +0000 UTC" firstStartedPulling="2025-10-02 21:42:00.693934962 +0000 UTC m=+1112.016942981" lastFinishedPulling="2025-10-02 21:42:29.548466663 +0000 UTC m=+1140.871474682" observedRunningTime="2025-10-02 21:42:30.968119636 +0000 UTC m=+1142.291127655" watchObservedRunningTime="2025-10-02 21:42:30.97240521 +0000 UTC m=+1142.295413229" Oct 02 
21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.162083 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59849968b5-h7qdr"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.183850 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-69fff7c54f-wpk22"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.186533 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.205693 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-scripts\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.205780 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa458384-6249-4aa7-9134-1c0faf777dd0-logs\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.205826 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcmdc\" (UniqueName: \"kubernetes.io/projected/fa458384-6249-4aa7-9134-1c0faf777dd0-kube-api-access-pcmdc\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.205876 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa458384-6249-4aa7-9134-1c0faf777dd0-horizon-secret-key\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.205906 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-config-data\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.226301 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69fff7c54f-wpk22"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.310908 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-scripts\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.310994 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa458384-6249-4aa7-9134-1c0faf777dd0-logs\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.311041 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcmdc\" (UniqueName: 
\"kubernetes.io/projected/fa458384-6249-4aa7-9134-1c0faf777dd0-kube-api-access-pcmdc\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.311081 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa458384-6249-4aa7-9134-1c0faf777dd0-horizon-secret-key\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.311106 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-config-data\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.312598 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-config-data\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.313105 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-scripts\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.313397 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa458384-6249-4aa7-9134-1c0faf777dd0-logs\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.335989 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.354812 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa458384-6249-4aa7-9134-1c0faf777dd0-horizon-secret-key\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.360075 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcmdc\" (UniqueName: \"kubernetes.io/projected/fa458384-6249-4aa7-9134-1c0faf777dd0-kube-api-access-pcmdc\") pod \"horizon-69fff7c54f-wpk22\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") " pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.378090 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-gtwtl"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.419131 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3a73-account-create-btbnb"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.433931 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-x9q29"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.467520 4636 reflector.go:368] Caches 
populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.525940 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.550637 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59849968b5-h7qdr"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.571222 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b45f78fcc-6f48n"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.632755 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.657832 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-cf9ld"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.672712 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-6d2jz"] Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.980144 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerStarted","Data":"ca6fd91b6e47661a8ffdd8a1e656897316734452234d7e2c4e2caa3d53baa0f4"} Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.985490 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" event={"ID":"f461d675-4ff1-45c7-b32e-10763620f9c0","Type":"ContainerStarted","Data":"7e925d86c229ab7dc3fd00f1ef245060d31d55622c2ef9035b4fed5f9df8151b"} Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.989296 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x9q29" event={"ID":"cd94e659-8a47-4277-af97-2f3b9799c738","Type":"ContainerStarted","Data":"9b016d3fc43aa0958607cf0a682fe15b4abc5a7ec5976eeb632384423e28e86b"} Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.989355 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x9q29" event={"ID":"cd94e659-8a47-4277-af97-2f3b9799c738","Type":"ContainerStarted","Data":"756091d908cde1fcfdbecb00f15f1fd32889dbd8a97650064a985db5923ca5ba"} Oct 02 21:42:31 crc kubenswrapper[4636]: I1002 21:42:31.995610 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59849968b5-h7qdr" event={"ID":"a20af78e-6d87-4dfc-82a5-e0290aa65ec0","Type":"ContainerStarted","Data":"9caec0f45685601d587b6dd32f0f0200bcd50f11e70476c1c81e96cc3eeb276c"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.019604 4636 generic.go:334] "Generic (PLEG): container finished" podID="eb93326f-3e16-453a-8b7d-1b5fb6e4b56d" containerID="13815fad88a054bdb5cd40a674bbba40fe7eb211011e2f6dc28cc25214b5c7ff" exitCode=0 Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.019816 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3a73-account-create-btbnb" event={"ID":"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d","Type":"ContainerDied","Data":"13815fad88a054bdb5cd40a674bbba40fe7eb211011e2f6dc28cc25214b5c7ff"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.019844 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3a73-account-create-btbnb" event={"ID":"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d","Type":"ContainerStarted","Data":"6352af5b8b031f497252c828bcf21d261cb1673cf83a508122ac443cbc1bf250"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.021655 4636 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b45f78fcc-6f48n" event={"ID":"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1","Type":"ContainerStarted","Data":"fa0563599d35f907c3e0876d3e318596e98fb7f644eb06f099b472849ddb62ae"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.022642 4636 generic.go:334] "Generic (PLEG): container finished" podID="4dc3b476-645a-40d8-a4ba-293d7b39acf9" containerID="a6549a0ceb46475aa404cc1ffe2cdae3622f15964572528f4deea0159a5a6562" exitCode=0 Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.022959 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2a44-account-create-t8qgl" event={"ID":"4dc3b476-645a-40d8-a4ba-293d7b39acf9","Type":"ContainerDied","Data":"a6549a0ceb46475aa404cc1ffe2cdae3622f15964572528f4deea0159a5a6562"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.033491 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-x9q29" podStartSLOduration=3.033472278 podStartE2EDuration="3.033472278s" podCreationTimestamp="2025-10-02 21:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:42:32.020190723 +0000 UTC m=+1143.343198742" watchObservedRunningTime="2025-10-02 21:42:32.033472278 +0000 UTC m=+1143.356480297" Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.035761 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" event={"ID":"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31","Type":"ContainerStarted","Data":"1bb210b640e3ae152fdc092e307286b651f5860d65d382db4f092c37f834a20e"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.051195 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cf9ld" event={"ID":"70ce2186-3a61-4f36-a51b-52a7bfeabdf1","Type":"ContainerStarted","Data":"a1091b4f2eaeac62c38c62852bc266df247c7908979c2a0a9933304632a5b91c"} Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.227162 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69fff7c54f-wpk22"] Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.445088 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.566839 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plwtv\" (UniqueName: \"kubernetes.io/projected/ac128b50-2394-48cd-85c8-8dd21d7faca5-kube-api-access-plwtv\") pod \"ac128b50-2394-48cd-85c8-8dd21d7faca5\" (UID: \"ac128b50-2394-48cd-85c8-8dd21d7faca5\") " Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.572407 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac128b50-2394-48cd-85c8-8dd21d7faca5-kube-api-access-plwtv" (OuterVolumeSpecName: "kube-api-access-plwtv") pod "ac128b50-2394-48cd-85c8-8dd21d7faca5" (UID: "ac128b50-2394-48cd-85c8-8dd21d7faca5"). InnerVolumeSpecName "kube-api-access-plwtv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:32 crc kubenswrapper[4636]: I1002 21:42:32.681909 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plwtv\" (UniqueName: \"kubernetes.io/projected/ac128b50-2394-48cd-85c8-8dd21d7faca5-kube-api-access-plwtv\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.082266 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1ea2-account-create-hkf65" event={"ID":"ac128b50-2394-48cd-85c8-8dd21d7faca5","Type":"ContainerDied","Data":"1518114cb792bdc71c3c7b51a1d5db6ce6e583a58b586cc35f3211e9a7d14283"} Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.082313 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1518114cb792bdc71c3c7b51a1d5db6ce6e583a58b586cc35f3211e9a7d14283" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.082287 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1ea2-account-create-hkf65" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.086373 4636 generic.go:334] "Generic (PLEG): container finished" podID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerID="51603bc73410f3c4b241be723b0bfef7ae80d86a0eefe88cc2bf51151c85f58d" exitCode=0 Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.086474 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" event={"ID":"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31","Type":"ContainerDied","Data":"51603bc73410f3c4b241be723b0bfef7ae80d86a0eefe88cc2bf51151c85f58d"} Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.094037 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69fff7c54f-wpk22" event={"ID":"fa458384-6249-4aa7-9134-1c0faf777dd0","Type":"ContainerStarted","Data":"c6276999d45978c74680f4507d27d6bff1ba7a3b36d4a9d1b9b54264cebf8103"} Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.097147 4636 generic.go:334] "Generic (PLEG): container finished" podID="f461d675-4ff1-45c7-b32e-10763620f9c0" containerID="3c8d2e1f60a50dcc72f0deebf15bbfb8d16ebcdf2f75fcf28fff1593a376dd81" exitCode=0 Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.097418 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" event={"ID":"f461d675-4ff1-45c7-b32e-10763620f9c0","Type":"ContainerDied","Data":"3c8d2e1f60a50dcc72f0deebf15bbfb8d16ebcdf2f75fcf28fff1593a376dd81"} Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.727865 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.740120 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhkrl\" (UniqueName: \"kubernetes.io/projected/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d-kube-api-access-lhkrl\") pod \"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d\" (UID: \"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.750453 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d-kube-api-access-lhkrl" (OuterVolumeSpecName: "kube-api-access-lhkrl") pod "eb93326f-3e16-453a-8b7d-1b5fb6e4b56d" (UID: "eb93326f-3e16-453a-8b7d-1b5fb6e4b56d"). InnerVolumeSpecName "kube-api-access-lhkrl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.844210 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhkrl\" (UniqueName: \"kubernetes.io/projected/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d-kube-api-access-lhkrl\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.849579 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.875568 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.945732 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fplr\" (UniqueName: \"kubernetes.io/projected/f461d675-4ff1-45c7-b32e-10763620f9c0-kube-api-access-5fplr\") pod \"f461d675-4ff1-45c7-b32e-10763620f9c0\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.945979 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-svc\") pod \"f461d675-4ff1-45c7-b32e-10763620f9c0\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.946018 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-sb\") pod \"f461d675-4ff1-45c7-b32e-10763620f9c0\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.946037 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-config\") pod \"f461d675-4ff1-45c7-b32e-10763620f9c0\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.946066 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v55pv\" (UniqueName: \"kubernetes.io/projected/4dc3b476-645a-40d8-a4ba-293d7b39acf9-kube-api-access-v55pv\") pod \"4dc3b476-645a-40d8-a4ba-293d7b39acf9\" (UID: \"4dc3b476-645a-40d8-a4ba-293d7b39acf9\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.946095 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-swift-storage-0\") pod \"f461d675-4ff1-45c7-b32e-10763620f9c0\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.946145 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-nb\") pod \"f461d675-4ff1-45c7-b32e-10763620f9c0\" (UID: \"f461d675-4ff1-45c7-b32e-10763620f9c0\") " Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.952177 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f461d675-4ff1-45c7-b32e-10763620f9c0-kube-api-access-5fplr" (OuterVolumeSpecName: "kube-api-access-5fplr") pod "f461d675-4ff1-45c7-b32e-10763620f9c0" (UID: 
"f461d675-4ff1-45c7-b32e-10763620f9c0"). InnerVolumeSpecName "kube-api-access-5fplr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.962752 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc3b476-645a-40d8-a4ba-293d7b39acf9-kube-api-access-v55pv" (OuterVolumeSpecName: "kube-api-access-v55pv") pod "4dc3b476-645a-40d8-a4ba-293d7b39acf9" (UID: "4dc3b476-645a-40d8-a4ba-293d7b39acf9"). InnerVolumeSpecName "kube-api-access-v55pv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.973494 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f461d675-4ff1-45c7-b32e-10763620f9c0" (UID: "f461d675-4ff1-45c7-b32e-10763620f9c0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.976084 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f461d675-4ff1-45c7-b32e-10763620f9c0" (UID: "f461d675-4ff1-45c7-b32e-10763620f9c0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.981683 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f461d675-4ff1-45c7-b32e-10763620f9c0" (UID: "f461d675-4ff1-45c7-b32e-10763620f9c0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.983965 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f461d675-4ff1-45c7-b32e-10763620f9c0" (UID: "f461d675-4ff1-45c7-b32e-10763620f9c0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:33 crc kubenswrapper[4636]: I1002 21:42:33.985052 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-config" (OuterVolumeSpecName: "config") pod "f461d675-4ff1-45c7-b32e-10763620f9c0" (UID: "f461d675-4ff1-45c7-b32e-10763620f9c0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.035876 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-llj5j"] Oct 02 21:42:34 crc kubenswrapper[4636]: E1002 21:42:34.036232 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f461d675-4ff1-45c7-b32e-10763620f9c0" containerName="init" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036249 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="f461d675-4ff1-45c7-b32e-10763620f9c0" containerName="init" Oct 02 21:42:34 crc kubenswrapper[4636]: E1002 21:42:34.036264 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb93326f-3e16-453a-8b7d-1b5fb6e4b56d" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036273 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb93326f-3e16-453a-8b7d-1b5fb6e4b56d" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: E1002 21:42:34.036296 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc3b476-645a-40d8-a4ba-293d7b39acf9" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036304 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc3b476-645a-40d8-a4ba-293d7b39acf9" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: E1002 21:42:34.036321 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac128b50-2394-48cd-85c8-8dd21d7faca5" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036331 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac128b50-2394-48cd-85c8-8dd21d7faca5" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036493 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="f461d675-4ff1-45c7-b32e-10763620f9c0" containerName="init" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036527 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac128b50-2394-48cd-85c8-8dd21d7faca5" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036540 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb93326f-3e16-453a-8b7d-1b5fb6e4b56d" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.036551 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc3b476-645a-40d8-a4ba-293d7b39acf9" containerName="mariadb-account-create" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.037434 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.039292 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.039366 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-q8t6j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.039547 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.048723 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-db-sync-config-data\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.048837 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-combined-ca-bundle\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.048903 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-config-data\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.048934 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-scripts\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.048969 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77c82\" (UniqueName: \"kubernetes.io/projected/e2e7e09e-0db9-4149-83d9-80163c11d203-kube-api-access-77c82\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049034 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2e7e09e-0db9-4149-83d9-80163c11d203-etc-machine-id\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049107 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049118 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049130 4636 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049139 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v55pv\" (UniqueName: \"kubernetes.io/projected/4dc3b476-645a-40d8-a4ba-293d7b39acf9-kube-api-access-v55pv\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049148 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049156 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f461d675-4ff1-45c7-b32e-10763620f9c0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.049164 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fplr\" (UniqueName: \"kubernetes.io/projected/f461d675-4ff1-45c7-b32e-10763620f9c0-kube-api-access-5fplr\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.053891 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-llj5j"] Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.118390 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" event={"ID":"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31","Type":"ContainerStarted","Data":"234d94949936a75c5197c9da185e310e6b461d43c3a68b46f2d78f9a4ac34e7e"} Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.119440 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.122996 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3a73-account-create-btbnb" event={"ID":"eb93326f-3e16-453a-8b7d-1b5fb6e4b56d","Type":"ContainerDied","Data":"6352af5b8b031f497252c828bcf21d261cb1673cf83a508122ac443cbc1bf250"} Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.123024 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6352af5b8b031f497252c828bcf21d261cb1673cf83a508122ac443cbc1bf250" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.123073 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3a73-account-create-btbnb" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.129390 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" event={"ID":"f461d675-4ff1-45c7-b32e-10763620f9c0","Type":"ContainerDied","Data":"7e925d86c229ab7dc3fd00f1ef245060d31d55622c2ef9035b4fed5f9df8151b"} Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.129430 4636 scope.go:117] "RemoveContainer" containerID="3c8d2e1f60a50dcc72f0deebf15bbfb8d16ebcdf2f75fcf28fff1593a376dd81" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.129526 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-gtwtl" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.144763 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" podStartSLOduration=5.144729905 podStartE2EDuration="5.144729905s" podCreationTimestamp="2025-10-02 21:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:42:34.140228725 +0000 UTC m=+1145.463236734" watchObservedRunningTime="2025-10-02 21:42:34.144729905 +0000 UTC m=+1145.467737924" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.150855 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2e7e09e-0db9-4149-83d9-80163c11d203-etc-machine-id\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.151162 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-db-sync-config-data\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.151190 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-combined-ca-bundle\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.151262 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-config-data\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.151295 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-scripts\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.151327 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77c82\" (UniqueName: \"kubernetes.io/projected/e2e7e09e-0db9-4149-83d9-80163c11d203-kube-api-access-77c82\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.151835 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2e7e09e-0db9-4149-83d9-80163c11d203-etc-machine-id\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.162692 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-config-data\") pod \"cinder-db-sync-llj5j\" (UID: 
\"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.164641 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-scripts\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.174783 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-db-sync-config-data\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.176270 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2a44-account-create-t8qgl" event={"ID":"4dc3b476-645a-40d8-a4ba-293d7b39acf9","Type":"ContainerDied","Data":"a37cec080d8c0d6bc7808dbb6b5d16adf5cc7fe79aacfccbc839b15524542b4a"} Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.176315 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a37cec080d8c0d6bc7808dbb6b5d16adf5cc7fe79aacfccbc839b15524542b4a" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.176373 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2a44-account-create-t8qgl" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.180415 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-combined-ca-bundle\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.182652 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77c82\" (UniqueName: \"kubernetes.io/projected/e2e7e09e-0db9-4149-83d9-80163c11d203-kube-api-access-77c82\") pod \"cinder-db-sync-llj5j\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.329803 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-gtwtl"] Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.343100 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-gtwtl"] Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.365569 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-llj5j" Oct 02 21:42:34 crc kubenswrapper[4636]: I1002 21:42:34.853195 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-llj5j"] Oct 02 21:42:34 crc kubenswrapper[4636]: W1002 21:42:34.879729 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2e7e09e_0db9_4149_83d9_80163c11d203.slice/crio-9d9da945c3bfef0e9e6b1cd80029bb9812b6040bd099e95b94e29ae1bcbe1b58 WatchSource:0}: Error finding container 9d9da945c3bfef0e9e6b1cd80029bb9812b6040bd099e95b94e29ae1bcbe1b58: Status 404 returned error can't find the container with id 9d9da945c3bfef0e9e6b1cd80029bb9812b6040bd099e95b94e29ae1bcbe1b58 Oct 02 21:42:35 crc kubenswrapper[4636]: I1002 21:42:35.189719 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-llj5j" event={"ID":"e2e7e09e-0db9-4149-83d9-80163c11d203","Type":"ContainerStarted","Data":"9d9da945c3bfef0e9e6b1cd80029bb9812b6040bd099e95b94e29ae1bcbe1b58"} Oct 02 21:42:35 crc kubenswrapper[4636]: I1002 21:42:35.614417 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f461d675-4ff1-45c7-b32e-10763620f9c0" path="/var/lib/kubelet/pods/f461d675-4ff1-45c7-b32e-10763620f9c0/volumes" Oct 02 21:42:37 crc kubenswrapper[4636]: I1002 21:42:37.937186 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5b45f78fcc-6f48n"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.006247 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6897cb4484-tthsj"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.010405 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.016764 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.033440 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6897cb4484-tthsj"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.084385 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69fff7c54f-wpk22"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145622 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-combined-ca-bundle\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145669 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-secret-key\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145774 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-logs\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145799 4636 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbj65\" (UniqueName: \"kubernetes.io/projected/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-kube-api-access-xbj65\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145816 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-tls-certs\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145860 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-scripts\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.145880 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-config-data\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.158154 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7646d88f4d-85mgl"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.159513 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.184658 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7646d88f4d-85mgl"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.254845 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-config-data\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.254896 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/65063729-cda3-488f-8e94-364db15e2d2d-config-data\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.254923 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65063729-cda3-488f-8e94-364db15e2d2d-scripts\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.254947 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-combined-ca-bundle\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.254964 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tpq9\" (UniqueName: \"kubernetes.io/projected/65063729-cda3-488f-8e94-364db15e2d2d-kube-api-access-4tpq9\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.254987 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-secret-key\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255005 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-combined-ca-bundle\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255060 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-horizon-tls-certs\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255096 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-logs\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255119 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbj65\" (UniqueName: \"kubernetes.io/projected/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-kube-api-access-xbj65\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255134 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-tls-certs\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255167 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65063729-cda3-488f-8e94-364db15e2d2d-logs\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255187 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-horizon-secret-key\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255209 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-scripts\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.255821 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-scripts\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.256946 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-config-data\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.264246 4636 generic.go:334] "Generic (PLEG): container finished" podID="cd94e659-8a47-4277-af97-2f3b9799c738" containerID="9b016d3fc43aa0958607cf0a682fe15b4abc5a7ec5976eeb632384423e28e86b" exitCode=0 Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.264290 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x9q29" event={"ID":"cd94e659-8a47-4277-af97-2f3b9799c738","Type":"ContainerDied","Data":"9b016d3fc43aa0958607cf0a682fe15b4abc5a7ec5976eeb632384423e28e86b"} Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.274277 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-secret-key\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.275901 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-logs\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.276809 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-combined-ca-bundle\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.284495 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-tls-certs\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.299639 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbj65\" (UniqueName: \"kubernetes.io/projected/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-kube-api-access-xbj65\") pod \"horizon-6897cb4484-tthsj\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356314 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-horizon-tls-certs\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356453 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65063729-cda3-488f-8e94-364db15e2d2d-logs\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356476 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-horizon-secret-key\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356510 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/65063729-cda3-488f-8e94-364db15e2d2d-config-data\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356547 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65063729-cda3-488f-8e94-364db15e2d2d-scripts\") pod \"horizon-7646d88f4d-85mgl\" (UID: 
\"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356565 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tpq9\" (UniqueName: \"kubernetes.io/projected/65063729-cda3-488f-8e94-364db15e2d2d-kube-api-access-4tpq9\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.356586 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-combined-ca-bundle\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.367130 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65063729-cda3-488f-8e94-364db15e2d2d-logs\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.368478 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/65063729-cda3-488f-8e94-364db15e2d2d-config-data\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.369706 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.372417 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-horizon-tls-certs\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.382831 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-combined-ca-bundle\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.383402 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65063729-cda3-488f-8e94-364db15e2d2d-scripts\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.394821 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/65063729-cda3-488f-8e94-364db15e2d2d-horizon-secret-key\") pod \"horizon-7646d88f4d-85mgl\" (UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.397117 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tpq9\" (UniqueName: \"kubernetes.io/projected/65063729-cda3-488f-8e94-364db15e2d2d-kube-api-access-4tpq9\") pod \"horizon-7646d88f4d-85mgl\" 
(UID: \"65063729-cda3-488f-8e94-364db15e2d2d\") " pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.474965 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.943401 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-7fdh9"] Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.944718 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.952297 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7cqzv" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.952499 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.971886 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-db-sync-config-data\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.971925 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7n95\" (UniqueName: \"kubernetes.io/projected/11ba2a28-597f-4252-8922-6360d60a5c81-kube-api-access-n7n95\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.971944 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-combined-ca-bundle\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:38 crc kubenswrapper[4636]: I1002 21:42:38.980283 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7fdh9"] Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.073058 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-db-sync-config-data\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.073103 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7n95\" (UniqueName: \"kubernetes.io/projected/11ba2a28-597f-4252-8922-6360d60a5c81-kube-api-access-n7n95\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.073123 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-combined-ca-bundle\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.076857 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-db-sync-config-data\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.077636 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-combined-ca-bundle\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.087789 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7n95\" (UniqueName: \"kubernetes.io/projected/11ba2a28-597f-4252-8922-6360d60a5c81-kube-api-access-n7n95\") pod \"barbican-db-sync-7fdh9\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.183791 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4vrl8"] Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.185129 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.187857 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.188977 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cwlxm" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.188985 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.196814 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4vrl8"] Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.272530 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.379277 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-combined-ca-bundle\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.379340 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-config\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.379392 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh42r\" (UniqueName: \"kubernetes.io/projected/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-kube-api-access-gh42r\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.481039 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-combined-ca-bundle\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.481436 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-config\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.481478 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh42r\" (UniqueName: \"kubernetes.io/projected/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-kube-api-access-gh42r\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.491673 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-config\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.503376 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-combined-ca-bundle\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.505586 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh42r\" (UniqueName: \"kubernetes.io/projected/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-kube-api-access-gh42r\") pod \"neutron-db-sync-4vrl8\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:39 crc kubenswrapper[4636]: I1002 21:42:39.803241 4636 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:42:40 crc kubenswrapper[4636]: I1002 21:42:40.313019 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:42:40 crc kubenswrapper[4636]: I1002 21:42:40.364661 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-t4n68"] Oct 02 21:42:40 crc kubenswrapper[4636]: I1002 21:42:40.365084 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" containerID="cri-o://f0ca5ffde1b510078fe1d9abb2180a0e8ee1ac0a46c7a84e2216e2d9ac6a0765" gracePeriod=10 Oct 02 21:42:41 crc kubenswrapper[4636]: I1002 21:42:41.300367 4636 generic.go:334] "Generic (PLEG): container finished" podID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerID="f0ca5ffde1b510078fe1d9abb2180a0e8ee1ac0a46c7a84e2216e2d9ac6a0765" exitCode=0 Oct 02 21:42:41 crc kubenswrapper[4636]: I1002 21:42:41.300412 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" event={"ID":"d0d73ce4-4765-4ef0-82bd-e07875e04521","Type":"ContainerDied","Data":"f0ca5ffde1b510078fe1d9abb2180a0e8ee1ac0a46c7a84e2216e2d9ac6a0765"} Oct 02 21:42:42 crc kubenswrapper[4636]: I1002 21:42:42.210016 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: connect: connection refused" Oct 02 21:42:43 crc kubenswrapper[4636]: I1002 21:42:43.318564 4636 generic.go:334] "Generic (PLEG): container finished" podID="4f7bc59e-3c13-4a51-9494-d45734d6c70c" containerID="b562cf48f873543cebea199ecdc361a425df054bff5946872490509fe7c68916" exitCode=0 Oct 02 21:42:43 crc kubenswrapper[4636]: I1002 21:42:43.318827 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qgkxv" event={"ID":"4f7bc59e-3c13-4a51-9494-d45734d6c70c","Type":"ContainerDied","Data":"b562cf48f873543cebea199ecdc361a425df054bff5946872490509fe7c68916"} Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.266604 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.346673 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x9q29" event={"ID":"cd94e659-8a47-4277-af97-2f3b9799c738","Type":"ContainerDied","Data":"756091d908cde1fcfdbecb00f15f1fd32889dbd8a97650064a985db5923ca5ba"} Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.346708 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="756091d908cde1fcfdbecb00f15f1fd32889dbd8a97650064a985db5923ca5ba" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.346730 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-x9q29" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.441809 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-combined-ca-bundle\") pod \"cd94e659-8a47-4277-af97-2f3b9799c738\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.442122 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-config-data\") pod \"cd94e659-8a47-4277-af97-2f3b9799c738\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.442150 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwx2d\" (UniqueName: \"kubernetes.io/projected/cd94e659-8a47-4277-af97-2f3b9799c738-kube-api-access-qwx2d\") pod \"cd94e659-8a47-4277-af97-2f3b9799c738\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.442276 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-scripts\") pod \"cd94e659-8a47-4277-af97-2f3b9799c738\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.442862 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-credential-keys\") pod \"cd94e659-8a47-4277-af97-2f3b9799c738\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.442909 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-fernet-keys\") pod \"cd94e659-8a47-4277-af97-2f3b9799c738\" (UID: \"cd94e659-8a47-4277-af97-2f3b9799c738\") " Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.447167 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd94e659-8a47-4277-af97-2f3b9799c738-kube-api-access-qwx2d" (OuterVolumeSpecName: "kube-api-access-qwx2d") pod "cd94e659-8a47-4277-af97-2f3b9799c738" (UID: "cd94e659-8a47-4277-af97-2f3b9799c738"). InnerVolumeSpecName "kube-api-access-qwx2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.447635 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-scripts" (OuterVolumeSpecName: "scripts") pod "cd94e659-8a47-4277-af97-2f3b9799c738" (UID: "cd94e659-8a47-4277-af97-2f3b9799c738"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.451185 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cd94e659-8a47-4277-af97-2f3b9799c738" (UID: "cd94e659-8a47-4277-af97-2f3b9799c738"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.452799 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cd94e659-8a47-4277-af97-2f3b9799c738" (UID: "cd94e659-8a47-4277-af97-2f3b9799c738"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.470177 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-config-data" (OuterVolumeSpecName: "config-data") pod "cd94e659-8a47-4277-af97-2f3b9799c738" (UID: "cd94e659-8a47-4277-af97-2f3b9799c738"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.480834 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd94e659-8a47-4277-af97-2f3b9799c738" (UID: "cd94e659-8a47-4277-af97-2f3b9799c738"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.547853 4636 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.547884 4636 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.547893 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.547904 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.547912 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwx2d\" (UniqueName: \"kubernetes.io/projected/cd94e659-8a47-4277-af97-2f3b9799c738-kube-api-access-qwx2d\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:46 crc kubenswrapper[4636]: I1002 21:42:46.547920 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd94e659-8a47-4277-af97-2f3b9799c738-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.209963 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: connect: connection refused" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.372193 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-x9q29"] Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.390773 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/keystone-bootstrap-x9q29"] Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.457799 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-q5lqg"] Oct 02 21:42:47 crc kubenswrapper[4636]: E1002 21:42:47.458344 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd94e659-8a47-4277-af97-2f3b9799c738" containerName="keystone-bootstrap" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.458361 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd94e659-8a47-4277-af97-2f3b9799c738" containerName="keystone-bootstrap" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.458634 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd94e659-8a47-4277-af97-2f3b9799c738" containerName="keystone-bootstrap" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.459302 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.460731 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.463074 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.468073 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q5lqg"] Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.468463 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jrtvb" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.468778 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.566145 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-scripts\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.566267 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnrdk\" (UniqueName: \"kubernetes.io/projected/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-kube-api-access-jnrdk\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.566295 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-fernet-keys\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.566332 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-combined-ca-bundle\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.566362 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-credential-keys\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.566397 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-config-data\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.614309 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd94e659-8a47-4277-af97-2f3b9799c738" path="/var/lib/kubelet/pods/cd94e659-8a47-4277-af97-2f3b9799c738/volumes" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.668398 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-combined-ca-bundle\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.668484 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-credential-keys\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.668527 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-config-data\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.668572 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-scripts\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.668623 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnrdk\" (UniqueName: \"kubernetes.io/projected/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-kube-api-access-jnrdk\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.668644 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-fernet-keys\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.674517 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-credential-keys\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc 
kubenswrapper[4636]: I1002 21:42:47.675151 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-fernet-keys\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.675334 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-config-data\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.676538 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-combined-ca-bundle\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.676859 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-scripts\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.699596 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnrdk\" (UniqueName: \"kubernetes.io/projected/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-kube-api-access-jnrdk\") pod \"keystone-bootstrap-q5lqg\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:42:47 crc kubenswrapper[4636]: I1002 21:42:47.796129 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.016565 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.016781 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mlcmx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-cf9ld_openstack(70ce2186-3a61-4f36-a51b-52a7bfeabdf1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.017973 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-cf9ld" podUID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.048158 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.048271 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nbfhfch57ch7h674h65h587h6h668h5c4h58ch5fbh686hcfh5cbhdfh56h54dh5fchb8h5dbh9bh5c7h5b8h59bh8dh656h688h544h655h8bh669q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pcmdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-69fff7c54f-wpk22_openstack(fa458384-6249-4aa7-9134-1c0faf777dd0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.051274 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-69fff7c54f-wpk22" podUID="fa458384-6249-4aa7-9134-1c0faf777dd0"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.069147 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.069308 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n654h669h68hdch69h547h5b4h86hcchfdh9hf7hb5h5f5h5dch698h67hddhcdh9dhb8h55bh544h65ch58h5dh5c6h558h89h544h74h86q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tgwzn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5b45f78fcc-6f48n_openstack(81419bdd-5cb5-4d6b-83d8-81cef7faf8a1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.071336 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5b45f78fcc-6f48n" podUID="81419bdd-5cb5-4d6b-83d8-81cef7faf8a1"
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.122222 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qgkxv"
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.280164 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4znw\" (UniqueName: \"kubernetes.io/projected/4f7bc59e-3c13-4a51-9494-d45734d6c70c-kube-api-access-w4znw\") pod \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") "
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.280444 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-db-sync-config-data\") pod \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") "
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.280507 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-combined-ca-bundle\") pod \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") "
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.280612 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-config-data\") pod \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\" (UID: \"4f7bc59e-3c13-4a51-9494-d45734d6c70c\") "
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.287631 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4f7bc59e-3c13-4a51-9494-d45734d6c70c" (UID: "4f7bc59e-3c13-4a51-9494-d45734d6c70c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.303273 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7bc59e-3c13-4a51-9494-d45734d6c70c-kube-api-access-w4znw" (OuterVolumeSpecName: "kube-api-access-w4znw") pod "4f7bc59e-3c13-4a51-9494-d45734d6c70c" (UID: "4f7bc59e-3c13-4a51-9494-d45734d6c70c"). InnerVolumeSpecName "kube-api-access-w4znw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.306903 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f7bc59e-3c13-4a51-9494-d45734d6c70c" (UID: "4f7bc59e-3c13-4a51-9494-d45734d6c70c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.349338 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-config-data" (OuterVolumeSpecName: "config-data") pod "4f7bc59e-3c13-4a51-9494-d45734d6c70c" (UID: "4f7bc59e-3c13-4a51-9494-d45734d6c70c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.378450 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qgkxv" event={"ID":"4f7bc59e-3c13-4a51-9494-d45734d6c70c","Type":"ContainerDied","Data":"2260e9c53e016c46c979e254cad13142e4490fa281de5634276f82d67fe34c5f"} Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.378499 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2260e9c53e016c46c979e254cad13142e4490fa281de5634276f82d67fe34c5f" Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.378564 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qgkxv" Oct 02 21:42:48 crc kubenswrapper[4636]: E1002 21:42:48.380260 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-cf9ld" podUID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1" Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.382144 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.382171 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.382182 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4znw\" (UniqueName: \"kubernetes.io/projected/4f7bc59e-3c13-4a51-9494-d45734d6c70c-kube-api-access-w4znw\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:48 crc kubenswrapper[4636]: I1002 21:42:48.382192 4636 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f7bc59e-3c13-4a51-9494-d45734d6c70c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.653215 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-v2w52"] Oct 02 21:42:49 crc kubenswrapper[4636]: E1002 21:42:49.653939 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7bc59e-3c13-4a51-9494-d45734d6c70c" containerName="glance-db-sync" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.653952 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7bc59e-3c13-4a51-9494-d45734d6c70c" containerName="glance-db-sync" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.654153 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7bc59e-3c13-4a51-9494-d45734d6c70c" containerName="glance-db-sync" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.658660 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.712718 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-v2w52"] Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.807166 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.807225 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.807264 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4bwh\" (UniqueName: \"kubernetes.io/projected/f6ae6eac-c5ec-4476-83e2-2b5532974b55-kube-api-access-v4bwh\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.807317 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.807371 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.807394 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-config\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.909090 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.909142 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-config\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.909185 4636 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.909205 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.909235 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4bwh\" (UniqueName: \"kubernetes.io/projected/f6ae6eac-c5ec-4476-83e2-2b5532974b55-kube-api-access-v4bwh\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.909277 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.910037 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.910526 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.911033 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-config\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.911507 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.911993 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:49 crc kubenswrapper[4636]: I1002 21:42:49.937434 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4bwh\" (UniqueName: 
\"kubernetes.io/projected/f6ae6eac-c5ec-4476-83e2-2b5532974b55-kube-api-access-v4bwh\") pod \"dnsmasq-dns-8b5c85b87-v2w52\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.002175 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.510265 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.512405 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.514545 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.514670 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f8hzc" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.514808 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.529716 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.641878 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq5d5\" (UniqueName: \"kubernetes.io/projected/737df75e-16d7-4397-87ea-dc83ab9a9503-kube-api-access-pq5d5\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.641943 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-scripts\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.641970 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.641991 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.642065 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-config-data\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.642105 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.642132 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-logs\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743242 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-config-data\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743338 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743370 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-logs\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743389 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq5d5\" (UniqueName: \"kubernetes.io/projected/737df75e-16d7-4397-87ea-dc83ab9a9503-kube-api-access-pq5d5\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743442 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-scripts\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743468 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.743487 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.744482 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.744555 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-logs\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.744783 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.748999 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-scripts\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.754344 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-config-data\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.765275 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.772013 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq5d5\" (UniqueName: \"kubernetes.io/projected/737df75e-16d7-4397-87ea-dc83ab9a9503-kube-api-access-pq5d5\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.795384 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.841981 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.854173 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.855458 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.858039 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.874159 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.945850 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kr9r\" (UniqueName: \"kubernetes.io/projected/d83f22bd-9e6c-466a-9303-35a68c588b62-kube-api-access-9kr9r\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.946090 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.946171 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.946246 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-logs\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.946356 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.946439 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:50 crc kubenswrapper[4636]: I1002 21:42:50.946579 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048079 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kr9r\" (UniqueName: \"kubernetes.io/projected/d83f22bd-9e6c-466a-9303-35a68c588b62-kube-api-access-9kr9r\") pod \"glance-default-internal-api-0\" (UID: 
\"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048123 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048143 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048170 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-logs\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048201 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048229 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048282 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.048456 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.049104 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.049195 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-logs\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.056065 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0"
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.061108 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0"
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.068966 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kr9r\" (UniqueName: \"kubernetes.io/projected/d83f22bd-9e6c-466a-9303-35a68c588b62-kube-api-access-9kr9r\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0"
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.075388 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " pod="openstack/glance-default-internal-api-0"
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.189051 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.801076 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 02 21:42:51 crc kubenswrapper[4636]: I1002 21:42:51.883467 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 02 21:42:57 crc kubenswrapper[4636]: I1002 21:42:57.210004 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: i/o timeout"
Oct 02 21:42:57 crc kubenswrapper[4636]: I1002 21:42:57.210780 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68"
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.031533 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68"
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.046055 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b45f78fcc-6f48n"
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.073279 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69fff7c54f-wpk22"
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187332 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgwzn\" (UniqueName: \"kubernetes.io/projected/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-kube-api-access-tgwzn\") pod \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187411 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-logs\") pod \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187444 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-config-data\") pod \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187478 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-horizon-secret-key\") pod \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187506 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-swift-storage-0\") pod \"d0d73ce4-4765-4ef0-82bd-e07875e04521\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187532 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-svc\") pod \"d0d73ce4-4765-4ef0-82bd-e07875e04521\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187572 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-sb\") pod \"d0d73ce4-4765-4ef0-82bd-e07875e04521\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187613 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-config\") pod \"d0d73ce4-4765-4ef0-82bd-e07875e04521\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187645 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-scripts\") pod \"fa458384-6249-4aa7-9134-1c0faf777dd0\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187689 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa458384-6249-4aa7-9134-1c0faf777dd0-logs\") pod \"fa458384-6249-4aa7-9134-1c0faf777dd0\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187717 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcmdc\" (UniqueName: \"kubernetes.io/projected/fa458384-6249-4aa7-9134-1c0faf777dd0-kube-api-access-pcmdc\") pod \"fa458384-6249-4aa7-9134-1c0faf777dd0\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187743 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-nb\") pod \"d0d73ce4-4765-4ef0-82bd-e07875e04521\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187778 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa458384-6249-4aa7-9134-1c0faf777dd0-horizon-secret-key\") pod \"fa458384-6249-4aa7-9134-1c0faf777dd0\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187795 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-scripts\") pod \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\" (UID: \"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187817 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbbpz\" (UniqueName: \"kubernetes.io/projected/d0d73ce4-4765-4ef0-82bd-e07875e04521-kube-api-access-lbbpz\") pod \"d0d73ce4-4765-4ef0-82bd-e07875e04521\" (UID: \"d0d73ce4-4765-4ef0-82bd-e07875e04521\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.187854 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-config-data\") pod \"fa458384-6249-4aa7-9134-1c0faf777dd0\" (UID: \"fa458384-6249-4aa7-9134-1c0faf777dd0\") "
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.188584 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-config-data" (OuterVolumeSpecName: "config-data") pod "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1" (UID: "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.189623 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-scripts" (OuterVolumeSpecName: "scripts") pod "fa458384-6249-4aa7-9134-1c0faf777dd0" (UID: "fa458384-6249-4aa7-9134-1c0faf777dd0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.190960 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-config-data\") on node \"crc\" DevicePath \"\""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.190984 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-scripts\") on node \"crc\" DevicePath \"\""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.194594 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-kube-api-access-tgwzn" (OuterVolumeSpecName: "kube-api-access-tgwzn") pod "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1" (UID: "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1"). InnerVolumeSpecName "kube-api-access-tgwzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.194898 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-logs" (OuterVolumeSpecName: "logs") pod "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1" (UID: "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.196431 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa458384-6249-4aa7-9134-1c0faf777dd0-logs" (OuterVolumeSpecName: "logs") pod "fa458384-6249-4aa7-9134-1c0faf777dd0" (UID: "fa458384-6249-4aa7-9134-1c0faf777dd0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.196579 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa458384-6249-4aa7-9134-1c0faf777dd0-kube-api-access-pcmdc" (OuterVolumeSpecName: "kube-api-access-pcmdc") pod "fa458384-6249-4aa7-9134-1c0faf777dd0" (UID: "fa458384-6249-4aa7-9134-1c0faf777dd0"). InnerVolumeSpecName "kube-api-access-pcmdc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.199300 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-scripts" (OuterVolumeSpecName: "scripts") pod "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1" (UID: "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.199813 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-config-data" (OuterVolumeSpecName: "config-data") pod "fa458384-6249-4aa7-9134-1c0faf777dd0" (UID: "fa458384-6249-4aa7-9134-1c0faf777dd0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.199888 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa458384-6249-4aa7-9134-1c0faf777dd0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fa458384-6249-4aa7-9134-1c0faf777dd0" (UID: "fa458384-6249-4aa7-9134-1c0faf777dd0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.205825 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d73ce4-4765-4ef0-82bd-e07875e04521-kube-api-access-lbbpz" (OuterVolumeSpecName: "kube-api-access-lbbpz") pod "d0d73ce4-4765-4ef0-82bd-e07875e04521" (UID: "d0d73ce4-4765-4ef0-82bd-e07875e04521"). InnerVolumeSpecName "kube-api-access-lbbpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.229430 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1" (UID: "81419bdd-5cb5-4d6b-83d8-81cef7faf8a1"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.250159 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d0d73ce4-4765-4ef0-82bd-e07875e04521" (UID: "d0d73ce4-4765-4ef0-82bd-e07875e04521"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.250716 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d0d73ce4-4765-4ef0-82bd-e07875e04521" (UID: "d0d73ce4-4765-4ef0-82bd-e07875e04521"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.252454 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d0d73ce4-4765-4ef0-82bd-e07875e04521" (UID: "d0d73ce4-4765-4ef0-82bd-e07875e04521"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.255349 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d0d73ce4-4765-4ef0-82bd-e07875e04521" (UID: "d0d73ce4-4765-4ef0-82bd-e07875e04521"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.259372 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-config" (OuterVolumeSpecName: "config") pod "d0d73ce4-4765-4ef0-82bd-e07875e04521" (UID: "d0d73ce4-4765-4ef0-82bd-e07875e04521"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293026 4636 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293061 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293072 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293081 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293092 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293100 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa458384-6249-4aa7-9134-1c0faf777dd0-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293108 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcmdc\" (UniqueName: \"kubernetes.io/projected/fa458384-6249-4aa7-9134-1c0faf777dd0-kube-api-access-pcmdc\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293118 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0d73ce4-4765-4ef0-82bd-e07875e04521-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293126 4636 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa458384-6249-4aa7-9134-1c0faf777dd0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293135 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293143 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbbpz\" (UniqueName: \"kubernetes.io/projected/d0d73ce4-4765-4ef0-82bd-e07875e04521-kube-api-access-lbbpz\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293150 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa458384-6249-4aa7-9134-1c0faf777dd0-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.293158 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgwzn\" (UniqueName: \"kubernetes.io/projected/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-kube-api-access-tgwzn\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc 
kubenswrapper[4636]: I1002 21:42:58.293166 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.326714 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7646d88f4d-85mgl"] Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.468234 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69fff7c54f-wpk22" event={"ID":"fa458384-6249-4aa7-9134-1c0faf777dd0","Type":"ContainerDied","Data":"c6276999d45978c74680f4507d27d6bff1ba7a3b36d4a9d1b9b54264cebf8103"} Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.468250 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69fff7c54f-wpk22" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.470104 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b45f78fcc-6f48n" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.470277 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b45f78fcc-6f48n" event={"ID":"81419bdd-5cb5-4d6b-83d8-81cef7faf8a1","Type":"ContainerDied","Data":"fa0563599d35f907c3e0876d3e318596e98fb7f644eb06f099b472849ddb62ae"} Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.472271 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" event={"ID":"d0d73ce4-4765-4ef0-82bd-e07875e04521","Type":"ContainerDied","Data":"8909feb3260e80512ecda1fa583061a61b7a7dcfd80e90e7df15c4f28249d573"} Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.472306 4636 scope.go:117] "RemoveContainer" containerID="f0ca5ffde1b510078fe1d9abb2180a0e8ee1ac0a46c7a84e2216e2d9ac6a0765" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.472389 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.523769 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69fff7c54f-wpk22"] Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.529800 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-69fff7c54f-wpk22"] Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.581634 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-t4n68"] Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.595904 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-t4n68"] Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.617118 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5b45f78fcc-6f48n"] Oct 02 21:42:58 crc kubenswrapper[4636]: I1002 21:42:58.624343 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5b45f78fcc-6f48n"] Oct 02 21:42:59 crc kubenswrapper[4636]: I1002 21:42:59.625922 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81419bdd-5cb5-4d6b-83d8-81cef7faf8a1" path="/var/lib/kubelet/pods/81419bdd-5cb5-4d6b-83d8-81cef7faf8a1/volumes" Oct 02 21:42:59 crc kubenswrapper[4636]: I1002 21:42:59.627059 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" path="/var/lib/kubelet/pods/d0d73ce4-4765-4ef0-82bd-e07875e04521/volumes" Oct 02 21:42:59 crc kubenswrapper[4636]: I1002 21:42:59.629339 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa458384-6249-4aa7-9134-1c0faf777dd0" path="/var/lib/kubelet/pods/fa458384-6249-4aa7-9134-1c0faf777dd0/volumes" Oct 02 21:43:02 crc kubenswrapper[4636]: I1002 21:43:02.211300 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-t4n68" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: i/o timeout" Oct 02 21:43:02 crc kubenswrapper[4636]: I1002 21:43:02.406282 4636 scope.go:117] "RemoveContainer" containerID="a8063e9aa43ef67d600ce77d28d191addf4c478fdab74576bb346b8dc876ae01" Oct 02 21:43:02 crc kubenswrapper[4636]: E1002 21:43:02.411906 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 02 21:43:02 crc kubenswrapper[4636]: E1002 21:43:02.412045 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-77c82,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-llj5j_openstack(e2e7e09e-0db9-4149-83d9-80163c11d203): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 21:43:02 crc kubenswrapper[4636]: E1002 21:43:02.413232 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-llj5j" podUID="e2e7e09e-0db9-4149-83d9-80163c11d203" Oct 02 21:43:02 crc kubenswrapper[4636]: I1002 21:43:02.588560 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerStarted","Data":"ca4b1fe04fe7e33a32a2df7463b72e4dbd1152c2c7372348e6873e8bc70cd84e"} Oct 02 21:43:02 crc kubenswrapper[4636]: E1002 21:43:02.615885 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-llj5j" podUID="e2e7e09e-0db9-4149-83d9-80163c11d203" Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.008418 4636 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/neutron-db-sync-4vrl8"] Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.191223 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7fdh9"] Oct 02 21:43:03 crc kubenswrapper[4636]: W1002 21:43:03.196934 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11ba2a28_597f_4252_8922_6360d60a5c81.slice/crio-54f6c6067bc04ad3731eab3271d8f67d134462862fae2083784dca81aa538946 WatchSource:0}: Error finding container 54f6c6067bc04ad3731eab3271d8f67d134462862fae2083784dca81aa538946: Status 404 returned error can't find the container with id 54f6c6067bc04ad3731eab3271d8f67d134462862fae2083784dca81aa538946 Oct 02 21:43:03 crc kubenswrapper[4636]: W1002 21:43:03.298424 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0cdc7cc_5de5_40f5_ac17_12c4be5a74d6.slice/crio-d4e179a165d4eb2d28ff6dd86035e67e6ec2ff3b2385bac5903d9d42990aaa61 WatchSource:0}: Error finding container d4e179a165d4eb2d28ff6dd86035e67e6ec2ff3b2385bac5903d9d42990aaa61: Status 404 returned error can't find the container with id d4e179a165d4eb2d28ff6dd86035e67e6ec2ff3b2385bac5903d9d42990aaa61 Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.306690 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q5lqg"] Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.342459 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6897cb4484-tthsj"] Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.352365 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.362060 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-v2w52"] Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.436018 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.662831 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerStarted","Data":"3cd88a149e153dcf509a1fbfa49ed066efedce2a10516a990ddad09a7052ca1e"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.662875 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerStarted","Data":"46c9f6b666b7d93b89273f8d65fcbd5de1a1373331b372a664a91511a3e14cc2"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.665469 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q5lqg" event={"ID":"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6","Type":"ContainerStarted","Data":"d4e179a165d4eb2d28ff6dd86035e67e6ec2ff3b2385bac5903d9d42990aaa61"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.669535 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d83f22bd-9e6c-466a-9303-35a68c588b62","Type":"ContainerStarted","Data":"b7c9f942c725add19f9056f3d9717200a4432eeefc0e0dbb5478bb13ce112310"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.672040 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" 
event={"ID":"f6ae6eac-c5ec-4476-83e2-2b5532974b55","Type":"ContainerStarted","Data":"dd814149456aebd4569e0f73f8add357738fa79ef3ffde3d7e91924788f39791"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.673366 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cf9ld" event={"ID":"70ce2186-3a61-4f36-a51b-52a7bfeabdf1","Type":"ContainerStarted","Data":"12dd581a1b23a750e6aa553ae7e249bdb4fe3c8e3ec50624de5dd861d380ca16"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.674874 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7fdh9" event={"ID":"11ba2a28-597f-4252-8922-6360d60a5c81","Type":"ContainerStarted","Data":"54f6c6067bc04ad3731eab3271d8f67d134462862fae2083784dca81aa538946"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.675743 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6897cb4484-tthsj" event={"ID":"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e","Type":"ContainerStarted","Data":"8eb6df51d904d96784106541fc2fad20b7607f533043d073cfe2449f700d4b5b"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.680669 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7646d88f4d-85mgl" podStartSLOduration=25.680655337 podStartE2EDuration="25.680655337s" podCreationTimestamp="2025-10-02 21:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:03.678012846 +0000 UTC m=+1175.001020865" watchObservedRunningTime="2025-10-02 21:43:03.680655337 +0000 UTC m=+1175.003663356" Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.689645 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59849968b5-h7qdr" event={"ID":"a20af78e-6d87-4dfc-82a5-e0290aa65ec0","Type":"ContainerStarted","Data":"85d988411f3419667e1747c1bd9c3495814ed5ccf2574591af7f96da5dd6c6da"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.690244 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59849968b5-h7qdr" event={"ID":"a20af78e-6d87-4dfc-82a5-e0290aa65ec0","Type":"ContainerStarted","Data":"514ce2740ae8358ce364a1a876684bfee308392063049d40b0cd2762caa68e9a"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.690209 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-59849968b5-h7qdr" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon" containerID="cri-o://85d988411f3419667e1747c1bd9c3495814ed5ccf2574591af7f96da5dd6c6da" gracePeriod=30 Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.690000 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-59849968b5-h7qdr" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon-log" containerID="cri-o://514ce2740ae8358ce364a1a876684bfee308392063049d40b0cd2762caa68e9a" gracePeriod=30 Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.703708 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-cf9ld" podStartSLOduration=3.598804041 podStartE2EDuration="34.703688072s" podCreationTimestamp="2025-10-02 21:42:29 +0000 UTC" firstStartedPulling="2025-10-02 21:42:31.581911842 +0000 UTC m=+1142.904919861" lastFinishedPulling="2025-10-02 21:43:02.686795873 +0000 UTC m=+1174.009803892" observedRunningTime="2025-10-02 21:43:03.697017514 +0000 UTC m=+1175.020025533" watchObservedRunningTime="2025-10-02 
21:43:03.703688072 +0000 UTC m=+1175.026696091" Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.710859 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerStarted","Data":"65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.713759 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4vrl8" event={"ID":"1b773903-9fdb-4fdd-97b5-1c89103b3a0b","Type":"ContainerStarted","Data":"f30e5ff61f2780092fb590bcce015ccbde7c6ea6989eb15df76fa5a86d18e767"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.713816 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4vrl8" event={"ID":"1b773903-9fdb-4fdd-97b5-1c89103b3a0b","Type":"ContainerStarted","Data":"ced38f16dacf47de76216a6f42b4788a292e8fcbb2fa991fff63d99172a9caae"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.715197 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-59849968b5-h7qdr" podStartSLOduration=8.319970587 podStartE2EDuration="34.715187249s" podCreationTimestamp="2025-10-02 21:42:29 +0000 UTC" firstStartedPulling="2025-10-02 21:42:31.522632869 +0000 UTC m=+1142.845640888" lastFinishedPulling="2025-10-02 21:42:57.917849531 +0000 UTC m=+1169.240857550" observedRunningTime="2025-10-02 21:43:03.713093423 +0000 UTC m=+1175.036101442" watchObservedRunningTime="2025-10-02 21:43:03.715187249 +0000 UTC m=+1175.038195268" Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.718419 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"737df75e-16d7-4397-87ea-dc83ab9a9503","Type":"ContainerStarted","Data":"a4cded43ed1f39d9c86f626b15da0c79d4fee49de3ed2d3bece1cf0bb8460073"} Oct 02 21:43:03 crc kubenswrapper[4636]: I1002 21:43:03.743435 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4vrl8" podStartSLOduration=24.743418182 podStartE2EDuration="24.743418182s" podCreationTimestamp="2025-10-02 21:42:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:03.728965797 +0000 UTC m=+1175.051973816" watchObservedRunningTime="2025-10-02 21:43:03.743418182 +0000 UTC m=+1175.066426201" Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.745634 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"737df75e-16d7-4397-87ea-dc83ab9a9503","Type":"ContainerStarted","Data":"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f"} Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.755271 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q5lqg" event={"ID":"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6","Type":"ContainerStarted","Data":"3a47d6e81f4218b0278f11546129bb57cc53d52b9eddd4ed6e77b9d1679eaf9a"} Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.757969 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d83f22bd-9e6c-466a-9303-35a68c588b62","Type":"ContainerStarted","Data":"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da"} Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.763123 4636 generic.go:334] "Generic (PLEG): container finished" 
podID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerID="ca64e98dab0a8d19754187dd518e7ea15863850246dee65c260d9cb4ce8aafd1" exitCode=0 Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.763163 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" event={"ID":"f6ae6eac-c5ec-4476-83e2-2b5532974b55","Type":"ContainerDied","Data":"ca64e98dab0a8d19754187dd518e7ea15863850246dee65c260d9cb4ce8aafd1"} Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.798920 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-q5lqg" podStartSLOduration=17.798903382 podStartE2EDuration="17.798903382s" podCreationTimestamp="2025-10-02 21:42:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:04.778048945 +0000 UTC m=+1176.101056954" watchObservedRunningTime="2025-10-02 21:43:04.798903382 +0000 UTC m=+1176.121911401" Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.824310 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6897cb4484-tthsj" event={"ID":"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e","Type":"ContainerStarted","Data":"8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8"} Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.824546 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6897cb4484-tthsj" event={"ID":"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e","Type":"ContainerStarted","Data":"bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2"} Oct 02 21:43:04 crc kubenswrapper[4636]: I1002 21:43:04.854796 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6897cb4484-tthsj" podStartSLOduration=27.854782214 podStartE2EDuration="27.854782214s" podCreationTimestamp="2025-10-02 21:42:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:04.840966095 +0000 UTC m=+1176.163974114" watchObservedRunningTime="2025-10-02 21:43:04.854782214 +0000 UTC m=+1176.177790233" Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.855570 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"737df75e-16d7-4397-87ea-dc83ab9a9503","Type":"ContainerStarted","Data":"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b"} Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.856029 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-log" containerID="cri-o://5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f" gracePeriod=30 Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.856961 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-httpd" containerID="cri-o://f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b" gracePeriod=30 Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.873804 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-log" 
containerID="cri-o://07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da" gracePeriod=30 Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.874880 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d83f22bd-9e6c-466a-9303-35a68c588b62","Type":"ContainerStarted","Data":"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489"} Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.875666 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-httpd" containerID="cri-o://5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489" gracePeriod=30 Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.899517 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=16.899500176 podStartE2EDuration="16.899500176s" podCreationTimestamp="2025-10-02 21:42:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:05.89437832 +0000 UTC m=+1177.217386339" watchObservedRunningTime="2025-10-02 21:43:05.899500176 +0000 UTC m=+1177.222508195" Oct 02 21:43:05 crc kubenswrapper[4636]: I1002 21:43:05.950048 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=16.950029595 podStartE2EDuration="16.950029595s" podCreationTimestamp="2025-10-02 21:42:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:05.939297959 +0000 UTC m=+1177.262305978" watchObservedRunningTime="2025-10-02 21:43:05.950029595 +0000 UTC m=+1177.273037614" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.395528 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.495625 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-combined-ca-bundle\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.495695 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-logs\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.495802 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-config-data\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.495877 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.496704 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-httpd-run\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.496730 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-scripts\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.496744 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-logs" (OuterVolumeSpecName: "logs") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.496946 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.497007 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq5d5\" (UniqueName: \"kubernetes.io/projected/737df75e-16d7-4397-87ea-dc83ab9a9503-kube-api-access-pq5d5\") pod \"737df75e-16d7-4397-87ea-dc83ab9a9503\" (UID: \"737df75e-16d7-4397-87ea-dc83ab9a9503\") " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.497693 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.497713 4636 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/737df75e-16d7-4397-87ea-dc83ab9a9503-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.502601 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-scripts" (OuterVolumeSpecName: "scripts") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.503939 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.507100 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/737df75e-16d7-4397-87ea-dc83ab9a9503-kube-api-access-pq5d5" (OuterVolumeSpecName: "kube-api-access-pq5d5") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "kube-api-access-pq5d5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.523700 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.578432 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-config-data" (OuterVolumeSpecName: "config-data") pod "737df75e-16d7-4397-87ea-dc83ab9a9503" (UID: "737df75e-16d7-4397-87ea-dc83ab9a9503"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.600814 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.600860 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.600870 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.600880 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq5d5\" (UniqueName: \"kubernetes.io/projected/737df75e-16d7-4397-87ea-dc83ab9a9503-kube-api-access-pq5d5\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.600890 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737df75e-16d7-4397-87ea-dc83ab9a9503-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.628733 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.705336 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.892323 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895005 4636 generic.go:334] "Generic (PLEG): container finished" podID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerID="f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b" exitCode=143 Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895037 4636 generic.go:334] "Generic (PLEG): container finished" podID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerID="5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f" exitCode=143 Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895110 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"737df75e-16d7-4397-87ea-dc83ab9a9503","Type":"ContainerDied","Data":"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895139 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"737df75e-16d7-4397-87ea-dc83ab9a9503","Type":"ContainerDied","Data":"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895148 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"737df75e-16d7-4397-87ea-dc83ab9a9503","Type":"ContainerDied","Data":"a4cded43ed1f39d9c86f626b15da0c79d4fee49de3ed2d3bece1cf0bb8460073"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895162 4636 scope.go:117] "RemoveContainer" containerID="f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.895292 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.916817 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerStarted","Data":"f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.954163 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.956320 4636 generic.go:334] "Generic (PLEG): container finished" podID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerID="5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489" exitCode=0 Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.956346 4636 generic.go:334] "Generic (PLEG): container finished" podID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerID="07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da" exitCode=143 Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.956382 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d83f22bd-9e6c-466a-9303-35a68c588b62","Type":"ContainerDied","Data":"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.956406 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d83f22bd-9e6c-466a-9303-35a68c588b62","Type":"ContainerDied","Data":"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.956457 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d83f22bd-9e6c-466a-9303-35a68c588b62","Type":"ContainerDied","Data":"b7c9f942c725add19f9056f3d9717200a4432eeefc0e0dbb5478bb13ce112310"} Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.956544 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.971593 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.987892 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:43:06 crc kubenswrapper[4636]: E1002 21:43:06.988324 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988341 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" Oct 02 21:43:06 crc kubenswrapper[4636]: E1002 21:43:06.988358 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-log" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988364 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-log" Oct 02 21:43:06 crc kubenswrapper[4636]: E1002 21:43:06.988381 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-httpd" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988388 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-httpd" Oct 02 21:43:06 crc kubenswrapper[4636]: E1002 21:43:06.988401 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="init" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988408 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="init" Oct 02 21:43:06 crc kubenswrapper[4636]: E1002 21:43:06.988417 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-log" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988423 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-log" Oct 02 21:43:06 crc kubenswrapper[4636]: E1002 21:43:06.988437 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-httpd" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988442 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-httpd" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988621 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-log" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988642 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d73ce4-4765-4ef0-82bd-e07875e04521" containerName="dnsmasq-dns" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988650 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-httpd" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.988660 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" containerName="glance-httpd" Oct 02 21:43:06 crc 
kubenswrapper[4636]: I1002 21:43:06.988668 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" containerName="glance-log" Oct 02 21:43:06 crc kubenswrapper[4636]: I1002 21:43:06.989542 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.000231 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.000431 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.003029 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" event={"ID":"f6ae6eac-c5ec-4476-83e2-2b5532974b55","Type":"ContainerStarted","Data":"0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7"} Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.003486 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.010725 4636 generic.go:334] "Generic (PLEG): container finished" podID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1" containerID="12dd581a1b23a750e6aa553ae7e249bdb4fe3c8e3ec50624de5dd861d380ca16" exitCode=0 Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.010793 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cf9ld" event={"ID":"70ce2186-3a61-4f36-a51b-52a7bfeabdf1","Type":"ContainerDied","Data":"12dd581a1b23a750e6aa553ae7e249bdb4fe3c8e3ec50624de5dd861d380ca16"} Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.010930 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-scripts\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011460 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-logs\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011641 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-httpd-run\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011676 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-logs" (OuterVolumeSpecName: "logs") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011712 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-config-data\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011880 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kr9r\" (UniqueName: \"kubernetes.io/projected/d83f22bd-9e6c-466a-9303-35a68c588b62-kube-api-access-9kr9r\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011904 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.011957 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-combined-ca-bundle\") pod \"d83f22bd-9e6c-466a-9303-35a68c588b62\" (UID: \"d83f22bd-9e6c-466a-9303-35a68c588b62\") " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.012033 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.012368 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.012383 4636 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d83f22bd-9e6c-466a-9303-35a68c588b62-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.028019 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d83f22bd-9e6c-466a-9303-35a68c588b62-kube-api-access-9kr9r" (OuterVolumeSpecName: "kube-api-access-9kr9r") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "kube-api-access-9kr9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.031714 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.049013 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-scripts" (OuterVolumeSpecName: "scripts") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.077033 4636 scope.go:117] "RemoveContainer" containerID="5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.107044 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.108860 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" podStartSLOduration=18.108847544 podStartE2EDuration="18.108847544s" podCreationTimestamp="2025-10-02 21:42:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:07.077293762 +0000 UTC m=+1178.400301781" watchObservedRunningTime="2025-10-02 21:43:07.108847544 +0000 UTC m=+1178.431855563" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115066 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115253 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-scripts\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115348 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-config-data\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115388 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn8qr\" (UniqueName: \"kubernetes.io/projected/5e40a0d0-1f71-4064-924c-df6addeee8b1-kube-api-access-hn8qr\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115458 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115514 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-logs\") pod \"glance-default-external-api-0\" (UID: 
\"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115589 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115653 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115812 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kr9r\" (UniqueName: \"kubernetes.io/projected/d83f22bd-9e6c-466a-9303-35a68c588b62-kube-api-access-9kr9r\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115856 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.115866 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.118436 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.136036 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-config-data" (OuterVolumeSpecName: "config-data") pod "d83f22bd-9e6c-466a-9303-35a68c588b62" (UID: "d83f22bd-9e6c-466a-9303-35a68c588b62"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.152143 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.170900 4636 scope.go:117] "RemoveContainer" containerID="f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b" Oct 02 21:43:07 crc kubenswrapper[4636]: E1002 21:43:07.175311 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b\": container with ID starting with f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b not found: ID does not exist" containerID="f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.175416 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b"} err="failed to get container status \"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b\": rpc error: code = NotFound desc = could not find container \"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b\": container with ID starting with f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.175505 4636 scope.go:117] "RemoveContainer" containerID="5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f" Oct 02 21:43:07 crc kubenswrapper[4636]: E1002 21:43:07.178580 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f\": container with ID starting with 5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f not found: ID does not exist" containerID="5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.178606 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f"} err="failed to get container status \"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f\": rpc error: code = NotFound desc = could not find container \"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f\": container with ID starting with 5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.178620 4636 scope.go:117] "RemoveContainer" containerID="f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.179309 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b"} err="failed to get container status \"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b\": rpc error: code = NotFound desc = could not find container \"f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b\": container with ID starting with f1f5e6b7676fbe7fdded6a6ed401952bc5d1414f06d80445be5578e4ce4cee6b not found: ID does not exist" Oct 02 21:43:07 crc 
kubenswrapper[4636]: I1002 21:43:07.179353 4636 scope.go:117] "RemoveContainer" containerID="5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.179701 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f"} err="failed to get container status \"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f\": rpc error: code = NotFound desc = could not find container \"5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f\": container with ID starting with 5c530854ff9237a9ddf0ba44cdc1d2e12464e7a9c4b639e6cb77bb0fec7b388f not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.179718 4636 scope.go:117] "RemoveContainer" containerID="5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.215802 4636 scope.go:117] "RemoveContainer" containerID="07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217019 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217064 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217112 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217178 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-scripts\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217207 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-config-data\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217223 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn8qr\" (UniqueName: \"kubernetes.io/projected/5e40a0d0-1f71-4064-924c-df6addeee8b1-kube-api-access-hn8qr\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217282 4636 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217308 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-logs\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217377 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217389 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.217415 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d83f22bd-9e6c-466a-9303-35a68c588b62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.220547 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.220969 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.221551 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-logs\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.228615 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-scripts\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.233323 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-config-data\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.233478 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.237375 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.254096 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn8qr\" (UniqueName: \"kubernetes.io/projected/5e40a0d0-1f71-4064-924c-df6addeee8b1-kube-api-access-hn8qr\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.258007 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.259255 4636 scope.go:117] "RemoveContainer" containerID="5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489" Oct 02 21:43:07 crc kubenswrapper[4636]: E1002 21:43:07.260549 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489\": container with ID starting with 5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489 not found: ID does not exist" containerID="5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.260595 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489"} err="failed to get container status \"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489\": rpc error: code = NotFound desc = could not find container \"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489\": container with ID starting with 5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489 not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.260621 4636 scope.go:117] "RemoveContainer" containerID="07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da" Oct 02 21:43:07 crc kubenswrapper[4636]: E1002 21:43:07.263859 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da\": container with ID starting with 07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da not found: ID does not exist" containerID="07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.263906 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da"} err="failed to get container status \"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da\": rpc error: code = NotFound desc = could not find container 
\"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da\": container with ID starting with 07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.263939 4636 scope.go:117] "RemoveContainer" containerID="5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.270294 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489"} err="failed to get container status \"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489\": rpc error: code = NotFound desc = could not find container \"5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489\": container with ID starting with 5d0d652b61513bb0268b5809cf3b18531d6bb5bec5b15fb9ca8ecd85b51ba489 not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.270336 4636 scope.go:117] "RemoveContainer" containerID="07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.272155 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da"} err="failed to get container status \"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da\": rpc error: code = NotFound desc = could not find container \"07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da\": container with ID starting with 07565a328c926b922610d7426d6fdbb18cbcdac3be02ba7cef7c6eec532e19da not found: ID does not exist" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.307828 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.394539 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.423005 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.424543 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.428462 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.428501 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.431251 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.439795 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561470 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561519 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561552 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdsw2\" (UniqueName: \"kubernetes.io/projected/c40074e6-e952-4d2f-b14d-2860b9eba108-kube-api-access-pdsw2\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561574 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561620 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561645 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561678 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.561701 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-logs\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.615689 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="737df75e-16d7-4397-87ea-dc83ab9a9503" path="/var/lib/kubelet/pods/737df75e-16d7-4397-87ea-dc83ab9a9503/volumes" Oct 02 
21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.620323 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d83f22bd-9e6c-466a-9303-35a68c588b62" path="/var/lib/kubelet/pods/d83f22bd-9e6c-466a-9303-35a68c588b62/volumes" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.674975 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675051 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675097 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdsw2\" (UniqueName: \"kubernetes.io/projected/c40074e6-e952-4d2f-b14d-2860b9eba108-kube-api-access-pdsw2\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675130 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675221 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675256 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675301 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.675340 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-logs\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.676276 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.678191 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.683700 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.684718 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.696601 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.697442 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.699353 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.717841 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdsw2\" (UniqueName: \"kubernetes.io/projected/c40074e6-e952-4d2f-b14d-2860b9eba108-kube-api-access-pdsw2\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:07 crc kubenswrapper[4636]: I1002 21:43:07.778512 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.052781 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.192351 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.370866 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.372064 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.419234 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cf9ld" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.475862 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.475905 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.514450 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-combined-ca-bundle\") pod \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.514512 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-config-data\") pod \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.514564 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-scripts\") pod \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.514580 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlcmx\" (UniqueName: \"kubernetes.io/projected/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-kube-api-access-mlcmx\") pod \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.514628 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-logs\") pod \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\" (UID: \"70ce2186-3a61-4f36-a51b-52a7bfeabdf1\") " Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.515636 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-logs" (OuterVolumeSpecName: "logs") pod "70ce2186-3a61-4f36-a51b-52a7bfeabdf1" (UID: "70ce2186-3a61-4f36-a51b-52a7bfeabdf1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.527945 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-scripts" (OuterVolumeSpecName: "scripts") pod "70ce2186-3a61-4f36-a51b-52a7bfeabdf1" (UID: "70ce2186-3a61-4f36-a51b-52a7bfeabdf1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.531958 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-kube-api-access-mlcmx" (OuterVolumeSpecName: "kube-api-access-mlcmx") pod "70ce2186-3a61-4f36-a51b-52a7bfeabdf1" (UID: "70ce2186-3a61-4f36-a51b-52a7bfeabdf1"). InnerVolumeSpecName "kube-api-access-mlcmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.564816 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70ce2186-3a61-4f36-a51b-52a7bfeabdf1" (UID: "70ce2186-3a61-4f36-a51b-52a7bfeabdf1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.565833 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-config-data" (OuterVolumeSpecName: "config-data") pod "70ce2186-3a61-4f36-a51b-52a7bfeabdf1" (UID: "70ce2186-3a61-4f36-a51b-52a7bfeabdf1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.616267 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.616297 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.616308 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlcmx\" (UniqueName: \"kubernetes.io/projected/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-kube-api-access-mlcmx\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.616317 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.616325 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70ce2186-3a61-4f36-a51b-52a7bfeabdf1-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:08 crc kubenswrapper[4636]: I1002 21:43:08.699230 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.084649 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cf9ld" 
event={"ID":"70ce2186-3a61-4f36-a51b-52a7bfeabdf1","Type":"ContainerDied","Data":"a1091b4f2eaeac62c38c62852bc266df247c7908979c2a0a9933304632a5b91c"} Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.084967 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1091b4f2eaeac62c38c62852bc266df247c7908979c2a0a9933304632a5b91c" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.084693 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cf9ld" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.094524 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5e40a0d0-1f71-4064-924c-df6addeee8b1","Type":"ContainerStarted","Data":"09e0902ed5559e590cb236400c26fcbb1b66815e41d222099e5191a4e2c0be64"} Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.096182 4636 generic.go:334] "Generic (PLEG): container finished" podID="d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" containerID="3a47d6e81f4218b0278f11546129bb57cc53d52b9eddd4ed6e77b9d1679eaf9a" exitCode=0 Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.096224 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q5lqg" event={"ID":"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6","Type":"ContainerDied","Data":"3a47d6e81f4218b0278f11546129bb57cc53d52b9eddd4ed6e77b9d1679eaf9a"} Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.127118 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c40074e6-e952-4d2f-b14d-2860b9eba108","Type":"ContainerStarted","Data":"a3dbac607a2b9678d603890d25fb01971466e938dbe041ccef81dea2020c6493"} Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.161220 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-574b88487b-hjf97"] Oct 02 21:43:09 crc kubenswrapper[4636]: E1002 21:43:09.161946 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1" containerName="placement-db-sync" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.161961 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1" containerName="placement-db-sync" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.162157 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1" containerName="placement-db-sync" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.165248 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.169220 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.169472 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.169572 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rz4bg" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.169656 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.169772 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.179430 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-574b88487b-hjf97"] Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.230286 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-config-data\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.230378 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-public-tls-certs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.230437 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jppm\" (UniqueName: \"kubernetes.io/projected/52887062-8197-405e-a4ee-6387b60fbf61-kube-api-access-2jppm\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.230459 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-combined-ca-bundle\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.230481 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-internal-tls-certs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.232563 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-scripts\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 
21:43:09.232605 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52887062-8197-405e-a4ee-6387b60fbf61-logs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.334580 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-public-tls-certs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.334908 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jppm\" (UniqueName: \"kubernetes.io/projected/52887062-8197-405e-a4ee-6387b60fbf61-kube-api-access-2jppm\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.334932 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-combined-ca-bundle\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.334954 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-internal-tls-certs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.335008 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-scripts\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.335030 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52887062-8197-405e-a4ee-6387b60fbf61-logs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.335073 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-config-data\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.336188 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52887062-8197-405e-a4ee-6387b60fbf61-logs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.343242 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-combined-ca-bundle\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.357405 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-config-data\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.359383 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-internal-tls-certs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.359531 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-public-tls-certs\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.359837 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52887062-8197-405e-a4ee-6387b60fbf61-scripts\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.367965 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jppm\" (UniqueName: \"kubernetes.io/projected/52887062-8197-405e-a4ee-6387b60fbf61-kube-api-access-2jppm\") pod \"placement-574b88487b-hjf97\" (UID: \"52887062-8197-405e-a4ee-6387b60fbf61\") " pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.509418 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:09 crc kubenswrapper[4636]: I1002 21:43:09.765072 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:43:10 crc kubenswrapper[4636]: I1002 21:43:10.148563 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5e40a0d0-1f71-4064-924c-df6addeee8b1","Type":"ContainerStarted","Data":"348097f1c42464400e4a64922a142283693b09203a5145ea056b31d48fa9dcbc"} Oct 02 21:43:10 crc kubenswrapper[4636]: I1002 21:43:10.150527 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c40074e6-e952-4d2f-b14d-2860b9eba108","Type":"ContainerStarted","Data":"6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d"} Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.110830 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.180249 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q5lqg" event={"ID":"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6","Type":"ContainerDied","Data":"d4e179a165d4eb2d28ff6dd86035e67e6ec2ff3b2385bac5903d9d42990aaa61"} Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.180280 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4e179a165d4eb2d28ff6dd86035e67e6ec2ff3b2385bac5903d9d42990aaa61" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.180328 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q5lqg" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.234108 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnrdk\" (UniqueName: \"kubernetes.io/projected/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-kube-api-access-jnrdk\") pod \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.234170 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-config-data\") pod \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.234242 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-combined-ca-bundle\") pod \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.234275 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-fernet-keys\") pod \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.234399 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-credential-keys\") pod \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.234415 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-scripts\") pod \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\" (UID: \"d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6\") " Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.239484 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" (UID: "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.246856 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-scripts" (OuterVolumeSpecName: "scripts") pod "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" (UID: "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.248853 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" (UID: "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.249892 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-kube-api-access-jnrdk" (OuterVolumeSpecName: "kube-api-access-jnrdk") pod "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" (UID: "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6"). InnerVolumeSpecName "kube-api-access-jnrdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.274004 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-config-data" (OuterVolumeSpecName: "config-data") pod "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" (UID: "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.301913 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" (UID: "d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.336688 4636 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.336722 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.336732 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnrdk\" (UniqueName: \"kubernetes.io/projected/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-kube-api-access-jnrdk\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.336743 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.336766 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:13 crc kubenswrapper[4636]: I1002 21:43:13.336776 4636 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.256577 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-767589cc85-t7ltn"] Oct 02 21:43:14 crc kubenswrapper[4636]: E1002 21:43:14.258052 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" containerName="keystone-bootstrap" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.258142 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" containerName="keystone-bootstrap" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.258354 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" containerName="keystone-bootstrap" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.259109 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.260900 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jrtvb" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.261076 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.261889 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.262069 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.262243 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.262349 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.266251 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-767589cc85-t7ltn"] Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358109 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g4rd\" (UniqueName: \"kubernetes.io/projected/a93dc4a5-ad77-418a-9cd1-23501d201acd-kube-api-access-8g4rd\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358161 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-fernet-keys\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358210 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-config-data\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358227 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-internal-tls-certs\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358260 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-public-tls-certs\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358294 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-scripts\") pod \"keystone-767589cc85-t7ltn\" (UID: 
\"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358311 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-combined-ca-bundle\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.358333 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-credential-keys\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.459838 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g4rd\" (UniqueName: \"kubernetes.io/projected/a93dc4a5-ad77-418a-9cd1-23501d201acd-kube-api-access-8g4rd\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.459892 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-fernet-keys\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.459942 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-config-data\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.459958 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-internal-tls-certs\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.459993 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-public-tls-certs\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.460019 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-scripts\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.460047 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-combined-ca-bundle\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " 
pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.460070 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-credential-keys\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.470117 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-public-tls-certs\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.470422 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-fernet-keys\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.470455 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-scripts\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.470616 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-combined-ca-bundle\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.470735 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-internal-tls-certs\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.471121 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-credential-keys\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.489375 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a93dc4a5-ad77-418a-9cd1-23501d201acd-config-data\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.523505 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g4rd\" (UniqueName: \"kubernetes.io/projected/a93dc4a5-ad77-418a-9cd1-23501d201acd-kube-api-access-8g4rd\") pod \"keystone-767589cc85-t7ltn\" (UID: \"a93dc4a5-ad77-418a-9cd1-23501d201acd\") " pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:14 crc kubenswrapper[4636]: I1002 21:43:14.580120 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:15 crc kubenswrapper[4636]: I1002 21:43:15.005919 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:43:15 crc kubenswrapper[4636]: I1002 21:43:15.077375 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-6d2jz"] Oct 02 21:43:15 crc kubenswrapper[4636]: I1002 21:43:15.077607 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="dnsmasq-dns" containerID="cri-o://234d94949936a75c5197c9da185e310e6b461d43c3a68b46f2d78f9a4ac34e7e" gracePeriod=10 Oct 02 21:43:15 crc kubenswrapper[4636]: I1002 21:43:15.208309 4636 generic.go:334] "Generic (PLEG): container finished" podID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerID="234d94949936a75c5197c9da185e310e6b461d43c3a68b46f2d78f9a4ac34e7e" exitCode=0 Oct 02 21:43:15 crc kubenswrapper[4636]: I1002 21:43:15.208626 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" event={"ID":"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31","Type":"ContainerDied","Data":"234d94949936a75c5197c9da185e310e6b461d43c3a68b46f2d78f9a4ac34e7e"} Oct 02 21:43:15 crc kubenswrapper[4636]: I1002 21:43:15.311966 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: connect: connection refused" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.374285 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.478278 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.597786 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.631500 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-nb\") pod \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.631613 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-svc\") pod \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.631684 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-config\") pod \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.631729 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-sb\") pod \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.631823 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-585ls\" (UniqueName: \"kubernetes.io/projected/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-kube-api-access-585ls\") pod \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.631847 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-swift-storage-0\") pod \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\" (UID: \"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31\") " Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.668212 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-kube-api-access-585ls" (OuterVolumeSpecName: "kube-api-access-585ls") pod "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" (UID: "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31"). InnerVolumeSpecName "kube-api-access-585ls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.717368 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" (UID: "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.736915 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.736942 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-585ls\" (UniqueName: \"kubernetes.io/projected/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-kube-api-access-585ls\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.780799 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-574b88487b-hjf97"] Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.793922 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-767589cc85-t7ltn"] Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.928801 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-config" (OuterVolumeSpecName: "config") pod "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" (UID: "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.941287 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" (UID: "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.941959 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.941988 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.942524 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" (UID: "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:18 crc kubenswrapper[4636]: I1002 21:43:18.992336 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" (UID: "76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.048443 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.048469 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.286088 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerStarted","Data":"9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b"} Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.288818 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-767589cc85-t7ltn" event={"ID":"a93dc4a5-ad77-418a-9cd1-23501d201acd","Type":"ContainerStarted","Data":"aa95335183d49496e6de29650a19fa0f884ae54480b97342b2bdf3733a2c6122"} Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.294207 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" event={"ID":"76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31","Type":"ContainerDied","Data":"1bb210b640e3ae152fdc092e307286b651f5860d65d382db4f092c37f834a20e"} Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.294767 4636 scope.go:117] "RemoveContainer" containerID="234d94949936a75c5197c9da185e310e6b461d43c3a68b46f2d78f9a4ac34e7e" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.294916 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-6d2jz" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.308903 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-574b88487b-hjf97" event={"ID":"52887062-8197-405e-a4ee-6387b60fbf61","Type":"ContainerStarted","Data":"fe481e260e460effed2cd4694fef9bf5068bdb000e24e879c061a2386cb035b9"} Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.320692 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5e40a0d0-1f71-4064-924c-df6addeee8b1","Type":"ContainerStarted","Data":"ddbd9b71f0e9550a9082a3b68f16b1b95edcc5aa1b2c1a592734dcf201ce2d96"} Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.339286 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7fdh9" event={"ID":"11ba2a28-597f-4252-8922-6360d60a5c81","Type":"ContainerStarted","Data":"f08c8397ab1d0020668ad7833d6b91aa201402ed5b8bad0c4b48d72bc7d2e423"} Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.351695 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.351674204 podStartE2EDuration="13.351674204s" podCreationTimestamp="2025-10-02 21:43:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:19.34328776 +0000 UTC m=+1190.666295779" watchObservedRunningTime="2025-10-02 21:43:19.351674204 +0000 UTC m=+1190.674682223" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.372556 4636 scope.go:117] "RemoveContainer" containerID="51603bc73410f3c4b241be723b0bfef7ae80d86a0eefe88cc2bf51151c85f58d" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.389907 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-7fdh9" podStartSLOduration=26.397354562 podStartE2EDuration="41.389889014s" podCreationTimestamp="2025-10-02 21:42:38 +0000 UTC" firstStartedPulling="2025-10-02 21:43:03.20244137 +0000 UTC m=+1174.525449389" lastFinishedPulling="2025-10-02 21:43:18.194975822 +0000 UTC m=+1189.517983841" observedRunningTime="2025-10-02 21:43:19.383119153 +0000 UTC m=+1190.706127172" watchObservedRunningTime="2025-10-02 21:43:19.389889014 +0000 UTC m=+1190.712897033" Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.422279 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-6d2jz"] Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.444040 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-6d2jz"] Oct 02 21:43:19 crc kubenswrapper[4636]: I1002 21:43:19.622676 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" path="/var/lib/kubelet/pods/76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31/volumes" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.350320 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c40074e6-e952-4d2f-b14d-2860b9eba108","Type":"ContainerStarted","Data":"2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5"} Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.354361 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-767589cc85-t7ltn" 
event={"ID":"a93dc4a5-ad77-418a-9cd1-23501d201acd","Type":"ContainerStarted","Data":"d7a0c16530846e5f881067809f751bc04a6ec36c3942144c0345a2b1bd283248"} Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.354818 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.359603 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-574b88487b-hjf97" event={"ID":"52887062-8197-405e-a4ee-6387b60fbf61","Type":"ContainerStarted","Data":"0de695175db4a35b427afbb38276d4fb2e1847e9af0a95923377fec191440574"} Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.359631 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-574b88487b-hjf97" event={"ID":"52887062-8197-405e-a4ee-6387b60fbf61","Type":"ContainerStarted","Data":"1b6f903b767db9a5a9a63ff02a4432f671311c4d896e298f4a04964dac9558a4"} Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.360161 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.360188 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.361710 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-llj5j" event={"ID":"e2e7e09e-0db9-4149-83d9-80163c11d203","Type":"ContainerStarted","Data":"42666bb860e39a1ed1b61416944660e0cfd3c3b77ab86eebc77bdf16462548fd"} Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.404514 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=13.404494892 podStartE2EDuration="13.404494892s" podCreationTimestamp="2025-10-02 21:43:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:20.388547286 +0000 UTC m=+1191.711555305" watchObservedRunningTime="2025-10-02 21:43:20.404494892 +0000 UTC m=+1191.727502911" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.425870 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-767589cc85-t7ltn" podStartSLOduration=6.425856603 podStartE2EDuration="6.425856603s" podCreationTimestamp="2025-10-02 21:43:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:20.419572695 +0000 UTC m=+1191.742580714" watchObservedRunningTime="2025-10-02 21:43:20.425856603 +0000 UTC m=+1191.748864612" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.442920 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-llj5j" podStartSLOduration=3.034756885 podStartE2EDuration="46.442900928s" podCreationTimestamp="2025-10-02 21:42:34 +0000 UTC" firstStartedPulling="2025-10-02 21:42:34.885891013 +0000 UTC m=+1146.208899032" lastFinishedPulling="2025-10-02 21:43:18.294035056 +0000 UTC m=+1189.617043075" observedRunningTime="2025-10-02 21:43:20.438050878 +0000 UTC m=+1191.761058897" watchObservedRunningTime="2025-10-02 21:43:20.442900928 +0000 UTC m=+1191.765908957" Oct 02 21:43:20 crc kubenswrapper[4636]: I1002 21:43:20.469699 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-574b88487b-hjf97" 
podStartSLOduration=11.469684773 podStartE2EDuration="11.469684773s" podCreationTimestamp="2025-10-02 21:43:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:20.46431981 +0000 UTC m=+1191.787327819" watchObservedRunningTime="2025-10-02 21:43:20.469684773 +0000 UTC m=+1191.792692782" Oct 02 21:43:24 crc kubenswrapper[4636]: I1002 21:43:24.395779 4636 generic.go:334] "Generic (PLEG): container finished" podID="11ba2a28-597f-4252-8922-6360d60a5c81" containerID="f08c8397ab1d0020668ad7833d6b91aa201402ed5b8bad0c4b48d72bc7d2e423" exitCode=0 Oct 02 21:43:24 crc kubenswrapper[4636]: I1002 21:43:24.395850 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7fdh9" event={"ID":"11ba2a28-597f-4252-8922-6360d60a5c81","Type":"ContainerDied","Data":"f08c8397ab1d0020668ad7833d6b91aa201402ed5b8bad0c4b48d72bc7d2e423"} Oct 02 21:43:27 crc kubenswrapper[4636]: I1002 21:43:27.428942 4636 generic.go:334] "Generic (PLEG): container finished" podID="e2e7e09e-0db9-4149-83d9-80163c11d203" containerID="42666bb860e39a1ed1b61416944660e0cfd3c3b77ab86eebc77bdf16462548fd" exitCode=0 Oct 02 21:43:27 crc kubenswrapper[4636]: I1002 21:43:27.429506 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-llj5j" event={"ID":"e2e7e09e-0db9-4149-83d9-80163c11d203","Type":"ContainerDied","Data":"42666bb860e39a1ed1b61416944660e0cfd3c3b77ab86eebc77bdf16462548fd"} Oct 02 21:43:27 crc kubenswrapper[4636]: I1002 21:43:27.440403 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 02 21:43:27 crc kubenswrapper[4636]: I1002 21:43:27.440658 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 02 21:43:27 crc kubenswrapper[4636]: I1002 21:43:27.486032 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 02 21:43:27 crc kubenswrapper[4636]: I1002 21:43:27.491679 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.054906 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.055237 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.083331 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.083961 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.103672 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.209952 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-combined-ca-bundle\") pod \"11ba2a28-597f-4252-8922-6360d60a5c81\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.210066 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7n95\" (UniqueName: \"kubernetes.io/projected/11ba2a28-597f-4252-8922-6360d60a5c81-kube-api-access-n7n95\") pod \"11ba2a28-597f-4252-8922-6360d60a5c81\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.210082 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-db-sync-config-data\") pod \"11ba2a28-597f-4252-8922-6360d60a5c81\" (UID: \"11ba2a28-597f-4252-8922-6360d60a5c81\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.215930 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11ba2a28-597f-4252-8922-6360d60a5c81-kube-api-access-n7n95" (OuterVolumeSpecName: "kube-api-access-n7n95") pod "11ba2a28-597f-4252-8922-6360d60a5c81" (UID: "11ba2a28-597f-4252-8922-6360d60a5c81"). InnerVolumeSpecName "kube-api-access-n7n95". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.219412 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "11ba2a28-597f-4252-8922-6360d60a5c81" (UID: "11ba2a28-597f-4252-8922-6360d60a5c81"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.239572 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11ba2a28-597f-4252-8922-6360d60a5c81" (UID: "11ba2a28-597f-4252-8922-6360d60a5c81"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.312362 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.312397 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7n95\" (UniqueName: \"kubernetes.io/projected/11ba2a28-597f-4252-8922-6360d60a5c81-kube-api-access-n7n95\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.312409 4636 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11ba2a28-597f-4252-8922-6360d60a5c81-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.448467 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7fdh9" event={"ID":"11ba2a28-597f-4252-8922-6360d60a5c81","Type":"ContainerDied","Data":"54f6c6067bc04ad3731eab3271d8f67d134462862fae2083784dca81aa538946"} Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.448524 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54f6c6067bc04ad3731eab3271d8f67d134462862fae2083784dca81aa538946" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.448531 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7fdh9" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.451899 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-central-agent" containerID="cri-o://65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35" gracePeriod=30 Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.452102 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerStarted","Data":"f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07"} Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.454977 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="proxy-httpd" containerID="cri-o://f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07" gracePeriod=30 Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.455039 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="sg-core" containerID="cri-o://9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b" gracePeriod=30 Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.455202 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-notification-agent" containerID="cri-o://f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245" gracePeriod=30 Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.455565 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.455602 4636 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.455613 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.455622 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.456120 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.727165 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-llj5j" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.742399 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.463374954 podStartE2EDuration="59.742382247s" podCreationTimestamp="2025-10-02 21:42:29 +0000 UTC" firstStartedPulling="2025-10-02 21:42:31.674974016 +0000 UTC m=+1142.997982035" lastFinishedPulling="2025-10-02 21:43:27.953981319 +0000 UTC m=+1199.276989328" observedRunningTime="2025-10-02 21:43:28.490108063 +0000 UTC m=+1199.813116082" watchObservedRunningTime="2025-10-02 21:43:28.742382247 +0000 UTC m=+1200.065390266" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.822559 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-scripts\") pod \"e2e7e09e-0db9-4149-83d9-80163c11d203\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.823533 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-combined-ca-bundle\") pod \"e2e7e09e-0db9-4149-83d9-80163c11d203\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.823630 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-config-data\") pod \"e2e7e09e-0db9-4149-83d9-80163c11d203\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.823648 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77c82\" (UniqueName: \"kubernetes.io/projected/e2e7e09e-0db9-4149-83d9-80163c11d203-kube-api-access-77c82\") pod \"e2e7e09e-0db9-4149-83d9-80163c11d203\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.823888 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2e7e09e-0db9-4149-83d9-80163c11d203-etc-machine-id\") pod \"e2e7e09e-0db9-4149-83d9-80163c11d203\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.823977 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-db-sync-config-data\") pod \"e2e7e09e-0db9-4149-83d9-80163c11d203\" (UID: \"e2e7e09e-0db9-4149-83d9-80163c11d203\") " Oct 02 21:43:28 crc 
kubenswrapper[4636]: I1002 21:43:28.826896 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2e7e09e-0db9-4149-83d9-80163c11d203-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e2e7e09e-0db9-4149-83d9-80163c11d203" (UID: "e2e7e09e-0db9-4149-83d9-80163c11d203"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.839857 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e7e09e-0db9-4149-83d9-80163c11d203-kube-api-access-77c82" (OuterVolumeSpecName: "kube-api-access-77c82") pod "e2e7e09e-0db9-4149-83d9-80163c11d203" (UID: "e2e7e09e-0db9-4149-83d9-80163c11d203"). InnerVolumeSpecName "kube-api-access-77c82". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.840090 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-scripts" (OuterVolumeSpecName: "scripts") pod "e2e7e09e-0db9-4149-83d9-80163c11d203" (UID: "e2e7e09e-0db9-4149-83d9-80163c11d203"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.851045 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e2e7e09e-0db9-4149-83d9-80163c11d203" (UID: "e2e7e09e-0db9-4149-83d9-80163c11d203"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.872071 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2e7e09e-0db9-4149-83d9-80163c11d203" (UID: "e2e7e09e-0db9-4149-83d9-80163c11d203"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.924653 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-config-data" (OuterVolumeSpecName: "config-data") pod "e2e7e09e-0db9-4149-83d9-80163c11d203" (UID: "e2e7e09e-0db9-4149-83d9-80163c11d203"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.925560 4636 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2e7e09e-0db9-4149-83d9-80163c11d203-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.925584 4636 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.925594 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.925604 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.925613 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e7e09e-0db9-4149-83d9-80163c11d203-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:28 crc kubenswrapper[4636]: I1002 21:43:28.925620 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77c82\" (UniqueName: \"kubernetes.io/projected/e2e7e09e-0db9-4149-83d9-80163c11d203-kube-api-access-77c82\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.355107 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6f9d95566c-lfqlc"] Oct 02 21:43:29 crc kubenswrapper[4636]: E1002 21:43:29.355950 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="dnsmasq-dns" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.355968 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="dnsmasq-dns" Oct 02 21:43:29 crc kubenswrapper[4636]: E1002 21:43:29.355984 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11ba2a28-597f-4252-8922-6360d60a5c81" containerName="barbican-db-sync" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.355991 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="11ba2a28-597f-4252-8922-6360d60a5c81" containerName="barbican-db-sync" Oct 02 21:43:29 crc kubenswrapper[4636]: E1002 21:43:29.356010 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e7e09e-0db9-4149-83d9-80163c11d203" containerName="cinder-db-sync" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.356016 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e7e09e-0db9-4149-83d9-80163c11d203" containerName="cinder-db-sync" Oct 02 21:43:29 crc kubenswrapper[4636]: E1002 21:43:29.356029 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="init" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.356035 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="init" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.356192 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e7e09e-0db9-4149-83d9-80163c11d203" 
containerName="cinder-db-sync" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.356213 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="76f6c8f4-6f4e-4c5d-94dd-4fdd5da29f31" containerName="dnsmasq-dns" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.356222 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="11ba2a28-597f-4252-8922-6360d60a5c81" containerName="barbican-db-sync" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.357180 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.363782 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.364853 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7cqzv" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.365110 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.401102 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6f9d95566c-lfqlc"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.426767 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-c5fc5f7c4-cvl68"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.428268 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.435405 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-c5fc5f7c4-cvl68"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.437436 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.481267 4636 generic.go:334] "Generic (PLEG): container finished" podID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerID="f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07" exitCode=0 Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.481294 4636 generic.go:334] "Generic (PLEG): container finished" podID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerID="9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b" exitCode=2 Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.481302 4636 generic.go:334] "Generic (PLEG): container finished" podID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerID="65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35" exitCode=0 Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.481339 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerDied","Data":"f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07"} Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.481365 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerDied","Data":"9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b"} Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.481376 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerDied","Data":"65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35"} Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.483201 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-llj5j" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.483848 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-llj5j" event={"ID":"e2e7e09e-0db9-4149-83d9-80163c11d203","Type":"ContainerDied","Data":"9d9da945c3bfef0e9e6b1cd80029bb9812b6040bd099e95b94e29ae1bcbe1b58"} Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.483902 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d9da945c3bfef0e9e6b1cd80029bb9812b6040bd099e95b94e29ae1bcbe1b58" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552553 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-combined-ca-bundle\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552606 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9140b0c4-6589-4b25-b300-0d4421daca16-logs\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552646 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-config-data\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552690 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-combined-ca-bundle\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552712 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7caa-f57a-474e-b86a-f85079b23081-logs\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552761 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-config-data-custom\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552784 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-config-data\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552814 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-config-data-custom\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552834 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72bhr\" (UniqueName: \"kubernetes.io/projected/b8eb7caa-f57a-474e-b86a-f85079b23081-kube-api-access-72bhr\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.552853 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gsnk\" (UniqueName: \"kubernetes.io/projected/9140b0c4-6589-4b25-b300-0d4421daca16-kube-api-access-8gsnk\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.557438 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-fff92"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.558774 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.572078 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-fff92"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665016 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-config-data\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665155 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-config-data-custom\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665178 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72bhr\" (UniqueName: \"kubernetes.io/projected/b8eb7caa-f57a-474e-b86a-f85079b23081-kube-api-access-72bhr\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665283 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gsnk\" (UniqueName: \"kubernetes.io/projected/9140b0c4-6589-4b25-b300-0d4421daca16-kube-api-access-8gsnk\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665325 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-combined-ca-bundle\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665443 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9140b0c4-6589-4b25-b300-0d4421daca16-logs\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665569 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-config-data\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.665637 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-combined-ca-bundle\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc 
kubenswrapper[4636]: I1002 21:43:29.665740 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7caa-f57a-474e-b86a-f85079b23081-logs\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.670288 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-config-data-custom\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.707378 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.713322 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.758692 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9140b0c4-6589-4b25-b300-0d4421daca16-logs\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.763822 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8eb7caa-f57a-474e-b86a-f85079b23081-logs\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.809024 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.812921 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-combined-ca-bundle\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.812979 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-config-data\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.823260 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8eb7caa-f57a-474e-b86a-f85079b23081-config-data-custom\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.864023 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7888867b8d-8s7qr"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.866619 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7888867b8d-8s7qr"] Oct 02 21:43:29 crc 
kubenswrapper[4636]: I1002 21:43:29.881358 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-config-data\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.881434 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.886079 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.889896 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-config-data-custom\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.896062 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9140b0c4-6589-4b25-b300-0d4421daca16-combined-ca-bundle\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.902561 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.909813 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.909851 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvlzd\" (UniqueName: \"kubernetes.io/projected/dc09218d-9cf9-4af1-9452-9f33c11754e3-kube-api-access-qvlzd\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.909918 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.909947 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.909981 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-config\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.910002 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.914311 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72bhr\" (UniqueName: \"kubernetes.io/projected/b8eb7caa-f57a-474e-b86a-f85079b23081-kube-api-access-72bhr\") pod \"barbican-worker-6f9d95566c-lfqlc\" (UID: \"b8eb7caa-f57a-474e-b86a-f85079b23081\") " pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.915198 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.933309 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.936739 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.936927 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-q8t6j" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.937126 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gsnk\" (UniqueName: \"kubernetes.io/projected/9140b0c4-6589-4b25-b300-0d4421daca16-kube-api-access-8gsnk\") pod \"barbican-keystone-listener-c5fc5f7c4-cvl68\" (UID: \"9140b0c4-6589-4b25-b300-0d4421daca16\") " pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.937183 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.937291 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.995487 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7cqzv" Oct 02 21:43:29 crc kubenswrapper[4636]: I1002 21:43:29.996832 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6f9d95566c-lfqlc" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.012811 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-scripts\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.013035 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-combined-ca-bundle\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020347 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020458 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020557 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020648 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020728 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020847 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.020941 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/008c551b-ebc5-476f-8445-5c976ca7ce57-logs\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " 
pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021058 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-config\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021136 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021211 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr22c\" (UniqueName: \"kubernetes.io/projected/008c551b-ebc5-476f-8445-5c976ca7ce57-kube-api-access-dr22c\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021292 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data-custom\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021367 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c5f19df9-144a-454c-afe1-08fa91b9312d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021486 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjxv4\" (UniqueName: \"kubernetes.io/projected/c5f19df9-144a-454c-afe1-08fa91b9312d-kube-api-access-mjxv4\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021570 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.021637 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvlzd\" (UniqueName: \"kubernetes.io/projected/dc09218d-9cf9-4af1-9452-9f33c11754e3-kube-api-access-qvlzd\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.022233 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: 
\"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.032646 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-fff92"] Oct 02 21:43:30 crc kubenswrapper[4636]: E1002 21:43:30.033250 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-qvlzd ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-59d5ff467f-fff92" podUID="dc09218d-9cf9-4af1-9452-9f33c11754e3" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.033608 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.040243 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-config\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.047373 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.054479 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-n7bw7"] Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.057435 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.059830 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.066825 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvlzd\" (UniqueName: \"kubernetes.io/projected/dc09218d-9cf9-4af1-9452-9f33c11754e3-kube-api-access-qvlzd\") pod \"dnsmasq-dns-59d5ff467f-fff92\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.066862 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.075377 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-n7bw7"] Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124579 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-scripts\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124635 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prjr8\" (UniqueName: \"kubernetes.io/projected/d92db512-14f3-4d38-bcbf-f223af634dcd-kube-api-access-prjr8\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124659 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-combined-ca-bundle\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124682 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-swift-storage-0\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124714 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124729 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124766 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124786 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124808 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/008c551b-ebc5-476f-8445-5c976ca7ce57-logs\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124833 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-svc\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124855 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-config\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124877 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr22c\" (UniqueName: \"kubernetes.io/projected/008c551b-ebc5-476f-8445-5c976ca7ce57-kube-api-access-dr22c\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124892 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data-custom\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124908 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-nb\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124926 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c5f19df9-144a-454c-afe1-08fa91b9312d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124963 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjxv4\" (UniqueName: \"kubernetes.io/projected/c5f19df9-144a-454c-afe1-08fa91b9312d-kube-api-access-mjxv4\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.124996 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-sb\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.126122 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/008c551b-ebc5-476f-8445-5c976ca7ce57-logs\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.130028 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-scripts\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.137299 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.138390 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-combined-ca-bundle\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.143265 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c5f19df9-144a-454c-afe1-08fa91b9312d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.147830 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.148294 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data-custom\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.154511 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.155048 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.159158 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr22c\" (UniqueName: \"kubernetes.io/projected/008c551b-ebc5-476f-8445-5c976ca7ce57-kube-api-access-dr22c\") pod \"barbican-api-7888867b8d-8s7qr\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 
21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.174727 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjxv4\" (UniqueName: \"kubernetes.io/projected/c5f19df9-144a-454c-afe1-08fa91b9312d-kube-api-access-mjxv4\") pod \"cinder-scheduler-0\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.226860 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prjr8\" (UniqueName: \"kubernetes.io/projected/d92db512-14f3-4d38-bcbf-f223af634dcd-kube-api-access-prjr8\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.227115 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-swift-storage-0\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.227186 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-svc\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.227271 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-config\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.227314 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-nb\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.227377 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-sb\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.228322 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-svc\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.229016 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-config\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.230545 4636 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-nb\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.231545 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-swift-storage-0\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.232147 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-sb\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.240508 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.245054 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.266027 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.266126 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.284937 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.308585 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prjr8\" (UniqueName: \"kubernetes.io/projected/d92db512-14f3-4d38-bcbf-f223af634dcd-kube-api-access-prjr8\") pod \"dnsmasq-dns-69c986f6d7-n7bw7\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.330032 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.330949 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c24327e-435e-47c7-9547-904524571570-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.330521 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.352260 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.352808 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcf6x\" (UniqueName: \"kubernetes.io/projected/9c24327e-435e-47c7-9547-904524571570-kube-api-access-wcf6x\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.352895 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c24327e-435e-47c7-9547-904524571570-logs\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.352952 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.352989 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-scripts\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.353036 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.478807 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.478875 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c24327e-435e-47c7-9547-904524571570-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.478947 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcf6x\" (UniqueName: \"kubernetes.io/projected/9c24327e-435e-47c7-9547-904524571570-kube-api-access-wcf6x\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.479029 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c24327e-435e-47c7-9547-904524571570-logs\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.479072 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.479095 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-scripts\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.479119 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c24327e-435e-47c7-9547-904524571570-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.479134 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.485532 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.485978 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c24327e-435e-47c7-9547-904524571570-logs\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.491300 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-scripts\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.495971 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.498258 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.511886 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcf6x\" (UniqueName: \"kubernetes.io/projected/9c24327e-435e-47c7-9547-904524571570-kube-api-access-wcf6x\") pod \"cinder-api-0\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") " pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.533774 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.533800 4636 prober_manager.go:312] "Failed to trigger a 
manual run" probe="Readiness" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.534005 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.534043 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.534880 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.590527 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.648737 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.688130 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvlzd\" (UniqueName: \"kubernetes.io/projected/dc09218d-9cf9-4af1-9452-9f33c11754e3-kube-api-access-qvlzd\") pod \"dc09218d-9cf9-4af1-9452-9f33c11754e3\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.688224 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-nb\") pod \"dc09218d-9cf9-4af1-9452-9f33c11754e3\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.688250 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-config\") pod \"dc09218d-9cf9-4af1-9452-9f33c11754e3\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.688334 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-sb\") pod \"dc09218d-9cf9-4af1-9452-9f33c11754e3\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.688420 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-svc\") pod \"dc09218d-9cf9-4af1-9452-9f33c11754e3\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.688475 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-swift-storage-0\") pod \"dc09218d-9cf9-4af1-9452-9f33c11754e3\" (UID: \"dc09218d-9cf9-4af1-9452-9f33c11754e3\") " Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.692766 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-config" (OuterVolumeSpecName: "config") pod "dc09218d-9cf9-4af1-9452-9f33c11754e3" (UID: "dc09218d-9cf9-4af1-9452-9f33c11754e3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.692945 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dc09218d-9cf9-4af1-9452-9f33c11754e3" (UID: "dc09218d-9cf9-4af1-9452-9f33c11754e3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.697779 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dc09218d-9cf9-4af1-9452-9f33c11754e3" (UID: "dc09218d-9cf9-4af1-9452-9f33c11754e3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.698171 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dc09218d-9cf9-4af1-9452-9f33c11754e3" (UID: "dc09218d-9cf9-4af1-9452-9f33c11754e3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.698710 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dc09218d-9cf9-4af1-9452-9f33c11754e3" (UID: "dc09218d-9cf9-4af1-9452-9f33c11754e3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.700787 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc09218d-9cf9-4af1-9452-9f33c11754e3-kube-api-access-qvlzd" (OuterVolumeSpecName: "kube-api-access-qvlzd") pod "dc09218d-9cf9-4af1-9452-9f33c11754e3" (UID: "dc09218d-9cf9-4af1-9452-9f33c11754e3"). InnerVolumeSpecName "kube-api-access-qvlzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.801851 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.801909 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.801920 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.801935 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvlzd\" (UniqueName: \"kubernetes.io/projected/dc09218d-9cf9-4af1-9452-9f33c11754e3-kube-api-access-qvlzd\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.801944 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.801953 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc09218d-9cf9-4af1-9452-9f33c11754e3-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:30 crc kubenswrapper[4636]: W1002 21:43:30.866779 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8eb7caa_f57a_474e_b86a_f85079b23081.slice/crio-b72547f8efc0aa018da8bfb869122436c3aa73e9973528d638812dd5739a071f WatchSource:0}: Error finding container b72547f8efc0aa018da8bfb869122436c3aa73e9973528d638812dd5739a071f: Status 404 returned error can't find the container with id b72547f8efc0aa018da8bfb869122436c3aa73e9973528d638812dd5739a071f Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.892067 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6f9d95566c-lfqlc"] Oct 02 21:43:30 crc kubenswrapper[4636]: I1002 21:43:30.948573 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.132525 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-c5fc5f7c4-cvl68"] Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.350176 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7888867b8d-8s7qr"] Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.378214 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-n7bw7"] Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.469137 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 02 21:43:31 crc kubenswrapper[4636]: W1002 21:43:31.501664 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c24327e_435e_47c7_9547_904524571570.slice/crio-26db4c41f3d666e1097fa5313000ddd0d62ef46a50b55d2cd5aaa95e527cb682 WatchSource:0}: Error finding container 
26db4c41f3d666e1097fa5313000ddd0d62ef46a50b55d2cd5aaa95e527cb682: Status 404 returned error can't find the container with id 26db4c41f3d666e1097fa5313000ddd0d62ef46a50b55d2cd5aaa95e527cb682 Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.556930 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.563971 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7888867b8d-8s7qr" event={"ID":"008c551b-ebc5-476f-8445-5c976ca7ce57","Type":"ContainerStarted","Data":"d449d8715941effba59826738247c29837dd74111fd98f6b2e57559e9765d36b"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623299 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-combined-ca-bundle\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623344 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-sg-core-conf-yaml\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623385 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9xxz\" (UniqueName: \"kubernetes.io/projected/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-kube-api-access-b9xxz\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623440 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-log-httpd\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623544 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-run-httpd\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623630 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-config-data\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.623682 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-scripts\") pod \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\" (UID: \"3f357c6e-1e7b-42d0-9719-396c2e9c89d2\") " Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.625017 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.625312 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.637445 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c24327e-435e-47c7-9547-904524571570","Type":"ContainerStarted","Data":"26db4c41f3d666e1097fa5313000ddd0d62ef46a50b55d2cd5aaa95e527cb682"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.637480 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" event={"ID":"9140b0c4-6589-4b25-b300-0d4421daca16","Type":"ContainerStarted","Data":"c26dbdd170556905908a64880bc7405ad44a78d11c90190b0f1a840f98773b56"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.650986 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-scripts" (OuterVolumeSpecName: "scripts") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.657192 4636 generic.go:334] "Generic (PLEG): container finished" podID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerID="f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245" exitCode=0 Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.657422 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerDied","Data":"f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.657457 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f357c6e-1e7b-42d0-9719-396c2e9c89d2","Type":"ContainerDied","Data":"ca6fd91b6e47661a8ffdd8a1e656897316734452234d7e2c4e2caa3d53baa0f4"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.657476 4636 scope.go:117] "RemoveContainer" containerID="f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.657588 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.658182 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-kube-api-access-b9xxz" (OuterVolumeSpecName: "kube-api-access-b9xxz") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "kube-api-access-b9xxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.663397 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" event={"ID":"d92db512-14f3-4d38-bcbf-f223af634dcd","Type":"ContainerStarted","Data":"2cc65b7dd100a84087df777f4bde22f5e2057af27d2dcfaafdc78ef1d0751d5e"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.677940 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f9d95566c-lfqlc" event={"ID":"b8eb7caa-f57a-474e-b86a-f85079b23081","Type":"ContainerStarted","Data":"b72547f8efc0aa018da8bfb869122436c3aa73e9973528d638812dd5739a071f"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.721958 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-fff92" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.722177 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c5f19df9-144a-454c-afe1-08fa91b9312d","Type":"ContainerStarted","Data":"4e02d0db0800dd2ff56191a069b34d0382b8f85a395360993d266caa1eaa32de"} Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.724178 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.725371 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.725397 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.725407 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9xxz\" (UniqueName: \"kubernetes.io/projected/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-kube-api-access-b9xxz\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.725415 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.725423 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.848494 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-fff92"] Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.864943 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.883569 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-fff92"] Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.893422 4636 scope.go:117] "RemoveContainer" containerID="9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.930117 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.966542 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-config-data" (OuterVolumeSpecName: "config-data") pod "3f357c6e-1e7b-42d0-9719-396c2e9c89d2" (UID: "3f357c6e-1e7b-42d0-9719-396c2e9c89d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:31 crc kubenswrapper[4636]: I1002 21:43:31.972406 4636 scope.go:117] "RemoveContainer" containerID="f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.035883 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f357c6e-1e7b-42d0-9719-396c2e9c89d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.197858 4636 scope.go:117] "RemoveContainer" containerID="65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.272709 4636 scope.go:117] "RemoveContainer" containerID="f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.278801 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07\": container with ID starting with f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07 not found: ID does not exist" containerID="f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.278844 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07"} err="failed to get container status \"f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07\": rpc error: code = NotFound desc = could not find container \"f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07\": container with ID starting with f51756ea16cbbc0e5eef87faeb1e1dd1a07de4bcb6dae018f10d1620ad56fc07 not found: ID does not exist" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.278868 4636 scope.go:117] "RemoveContainer" containerID="9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.287135 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b\": container with ID starting with 9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b not found: ID does not exist" 
containerID="9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.287165 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b"} err="failed to get container status \"9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b\": rpc error: code = NotFound desc = could not find container \"9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b\": container with ID starting with 9cfbc1b0d069ffede8a08477f55f347dd849d1b384029f0723c156dea12c6d8b not found: ID does not exist" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.287185 4636 scope.go:117] "RemoveContainer" containerID="f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.289275 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245\": container with ID starting with f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245 not found: ID does not exist" containerID="f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.289303 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245"} err="failed to get container status \"f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245\": rpc error: code = NotFound desc = could not find container \"f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245\": container with ID starting with f6dbf03f7127fedbd6292977535f32dea4fd0fd18c3eddbd08d22eb8cfcc8245 not found: ID does not exist" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.289323 4636 scope.go:117] "RemoveContainer" containerID="65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.296490 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35\": container with ID starting with 65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35 not found: ID does not exist" containerID="65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.296721 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35"} err="failed to get container status \"65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35\": rpc error: code = NotFound desc = could not find container \"65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35\": container with ID starting with 65d8ac98014f7f0ede71716205cac97f4892b216f2a99c4e7314710b7f52fd35 not found: ID does not exist" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.302620 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.328584 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.363605 4636 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ceilometer-0"] Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.364318 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-central-agent" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364334 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-central-agent" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.364349 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-notification-agent" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364354 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-notification-agent" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.364378 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="sg-core" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364384 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="sg-core" Oct 02 21:43:32 crc kubenswrapper[4636]: E1002 21:43:32.364400 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="proxy-httpd" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364405 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="proxy-httpd" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364569 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="sg-core" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364583 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-central-agent" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364599 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="ceilometer-notification-agent" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.364616 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" containerName="proxy-httpd" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.366249 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.371493 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.374362 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.375195 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.466896 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-config-data\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.467007 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-494wb\" (UniqueName: \"kubernetes.io/projected/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-kube-api-access-494wb\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.467027 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.467390 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-scripts\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.467415 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-log-httpd\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.467430 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-run-httpd\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.467514 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573014 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-scripts\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573060 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-log-httpd\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573080 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-run-httpd\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573136 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573159 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-config-data\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573208 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-494wb\" (UniqueName: \"kubernetes.io/projected/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-kube-api-access-494wb\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.573225 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.577148 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-log-httpd\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.577210 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-run-httpd\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.596481 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-scripts\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.599472 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.599897 4636 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-config-data\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.610887 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.613341 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-494wb\" (UniqueName: \"kubernetes.io/projected/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-kube-api-access-494wb\") pod \"ceilometer-0\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.737205 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.772051 4636 generic.go:334] "Generic (PLEG): container finished" podID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerID="e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e" exitCode=0 Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.772111 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" event={"ID":"d92db512-14f3-4d38-bcbf-f223af634dcd","Type":"ContainerDied","Data":"e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e"} Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.788309 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7888867b8d-8s7qr" event={"ID":"008c551b-ebc5-476f-8445-5c976ca7ce57","Type":"ContainerStarted","Data":"ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36"} Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.788357 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7888867b8d-8s7qr" event={"ID":"008c551b-ebc5-476f-8445-5c976ca7ce57","Type":"ContainerStarted","Data":"92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2"} Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.788396 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.788516 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:32 crc kubenswrapper[4636]: I1002 21:43:32.818800 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7888867b8d-8s7qr" podStartSLOduration=3.81878588 podStartE2EDuration="3.81878588s" podCreationTimestamp="2025-10-02 21:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:32.810469018 +0000 UTC m=+1204.133477037" watchObservedRunningTime="2025-10-02 21:43:32.81878588 +0000 UTC m=+1204.141793899" Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.044220 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.484090 4636 prober.go:107] "Probe failed" probeType="Startup" 
pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.559173 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.600380 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.624373 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f357c6e-1e7b-42d0-9719-396c2e9c89d2" path="/var/lib/kubelet/pods/3f357c6e-1e7b-42d0-9719-396c2e9c89d2/volumes" Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.625721 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc09218d-9cf9-4af1-9452-9f33c11754e3" path="/var/lib/kubelet/pods/dc09218d-9cf9-4af1-9452-9f33c11754e3/volumes" Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.895519 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" event={"ID":"d92db512-14f3-4d38-bcbf-f223af634dcd","Type":"ContainerStarted","Data":"6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.897104 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.910798 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerStarted","Data":"4cc3fb7290b22e6c9acb7150da97dbfc2ad5cbd693b66e27e61c935e5726085d"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.942230 4636 generic.go:334] "Generic (PLEG): container finished" podID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerID="85d988411f3419667e1747c1bd9c3495814ed5ccf2574591af7f96da5dd6c6da" exitCode=137 Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.942260 4636 generic.go:334] "Generic (PLEG): container finished" podID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerID="514ce2740ae8358ce364a1a876684bfee308392063049d40b0cd2762caa68e9a" exitCode=137 Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.942303 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59849968b5-h7qdr" event={"ID":"a20af78e-6d87-4dfc-82a5-e0290aa65ec0","Type":"ContainerDied","Data":"85d988411f3419667e1747c1bd9c3495814ed5ccf2574591af7f96da5dd6c6da"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.942330 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59849968b5-h7qdr" event={"ID":"a20af78e-6d87-4dfc-82a5-e0290aa65ec0","Type":"ContainerDied","Data":"514ce2740ae8358ce364a1a876684bfee308392063049d40b0cd2762caa68e9a"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.944117 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c5f19df9-144a-454c-afe1-08fa91b9312d","Type":"ContainerStarted","Data":"71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.945462 4636 generic.go:334] "Generic (PLEG): container finished" podID="1b773903-9fdb-4fdd-97b5-1c89103b3a0b" 
containerID="f30e5ff61f2780092fb590bcce015ccbde7c6ea6989eb15df76fa5a86d18e767" exitCode=0 Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.945497 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4vrl8" event={"ID":"1b773903-9fdb-4fdd-97b5-1c89103b3a0b","Type":"ContainerDied","Data":"f30e5ff61f2780092fb590bcce015ccbde7c6ea6989eb15df76fa5a86d18e767"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.949710 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c24327e-435e-47c7-9547-904524571570","Type":"ContainerStarted","Data":"a5539000836254d6a2cb5ac9a68120dab231e2fd56b9504b7473559513fbeefe"} Oct 02 21:43:33 crc kubenswrapper[4636]: I1002 21:43:33.964121 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" podStartSLOduration=3.9641031079999998 podStartE2EDuration="3.964103108s" podCreationTimestamp="2025-10-02 21:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:33.924070699 +0000 UTC m=+1205.247078718" watchObservedRunningTime="2025-10-02 21:43:33.964103108 +0000 UTC m=+1205.287111127" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.357261 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.449042 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4p7q\" (UniqueName: \"kubernetes.io/projected/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-kube-api-access-n4p7q\") pod \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.449112 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-scripts\") pod \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.449160 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-config-data\") pod \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.449239 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-horizon-secret-key\") pod \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.449281 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-logs\") pod \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\" (UID: \"a20af78e-6d87-4dfc-82a5-e0290aa65ec0\") " Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.450179 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-logs" (OuterVolumeSpecName: "logs") pod "a20af78e-6d87-4dfc-82a5-e0290aa65ec0" (UID: "a20af78e-6d87-4dfc-82a5-e0290aa65ec0"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.487865 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a20af78e-6d87-4dfc-82a5-e0290aa65ec0" (UID: "a20af78e-6d87-4dfc-82a5-e0290aa65ec0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.487981 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-kube-api-access-n4p7q" (OuterVolumeSpecName: "kube-api-access-n4p7q") pod "a20af78e-6d87-4dfc-82a5-e0290aa65ec0" (UID: "a20af78e-6d87-4dfc-82a5-e0290aa65ec0"). InnerVolumeSpecName "kube-api-access-n4p7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.504339 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-config-data" (OuterVolumeSpecName: "config-data") pod "a20af78e-6d87-4dfc-82a5-e0290aa65ec0" (UID: "a20af78e-6d87-4dfc-82a5-e0290aa65ec0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.510570 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-scripts" (OuterVolumeSpecName: "scripts") pod "a20af78e-6d87-4dfc-82a5-e0290aa65ec0" (UID: "a20af78e-6d87-4dfc-82a5-e0290aa65ec0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.551642 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4p7q\" (UniqueName: \"kubernetes.io/projected/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-kube-api-access-n4p7q\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.551672 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.551683 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.551695 4636 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.551706 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20af78e-6d87-4dfc-82a5-e0290aa65ec0-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.736779 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.736908 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.779576 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.780004 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:43:34 crc kubenswrapper[4636]: I1002 21:43:34.797156 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.029962 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59849968b5-h7qdr" event={"ID":"a20af78e-6d87-4dfc-82a5-e0290aa65ec0","Type":"ContainerDied","Data":"9caec0f45685601d587b6dd32f0f0200bcd50f11e70476c1c81e96cc3eeb276c"} Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.030285 4636 scope.go:117] "RemoveContainer" containerID="85d988411f3419667e1747c1bd9c3495814ed5ccf2574591af7f96da5dd6c6da" Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.030561 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-59849968b5-h7qdr" Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.082859 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c24327e-435e-47c7-9547-904524571570","Type":"ContainerStarted","Data":"2f92401e7a0a1d8862442efa91362be0515ccdffaf59deb392712f609a84f32d"} Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.083051 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api-log" containerID="cri-o://a5539000836254d6a2cb5ac9a68120dab231e2fd56b9504b7473559513fbeefe" gracePeriod=30 Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.083160 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.083465 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api" containerID="cri-o://2f92401e7a0a1d8862442efa91362be0515ccdffaf59deb392712f609a84f32d" gracePeriod=30 Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.096959 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59849968b5-h7qdr"] Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.109136 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-59849968b5-h7qdr"] Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.123823 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.12380716 podStartE2EDuration="5.12380716s" podCreationTimestamp="2025-10-02 21:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:35.115037906 +0000 UTC m=+1206.438045925" watchObservedRunningTime="2025-10-02 21:43:35.12380716 +0000 UTC m=+1206.446815179" Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.180027 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 02 21:43:35 crc kubenswrapper[4636]: I1002 21:43:35.619225 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" path="/var/lib/kubelet/pods/a20af78e-6d87-4dfc-82a5-e0290aa65ec0/volumes" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.092932 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c5f19df9-144a-454c-afe1-08fa91b9312d","Type":"ContainerStarted","Data":"31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd"} Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.095710 4636 generic.go:334] "Generic (PLEG): container finished" podID="9c24327e-435e-47c7-9547-904524571570" containerID="a5539000836254d6a2cb5ac9a68120dab231e2fd56b9504b7473559513fbeefe" exitCode=143 Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.095799 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c24327e-435e-47c7-9547-904524571570","Type":"ContainerDied","Data":"a5539000836254d6a2cb5ac9a68120dab231e2fd56b9504b7473559513fbeefe"} Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.386710 4636 scope.go:117] "RemoveContainer" 
containerID="514ce2740ae8358ce364a1a876684bfee308392063049d40b0cd2762caa68e9a" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.393817 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.279362663 podStartE2EDuration="7.393799076s" podCreationTimestamp="2025-10-02 21:43:29 +0000 UTC" firstStartedPulling="2025-10-02 21:43:30.953161402 +0000 UTC m=+1202.276169421" lastFinishedPulling="2025-10-02 21:43:32.067597815 +0000 UTC m=+1203.390605834" observedRunningTime="2025-10-02 21:43:36.114508979 +0000 UTC m=+1207.437517008" watchObservedRunningTime="2025-10-02 21:43:36.393799076 +0000 UTC m=+1207.716807095" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.400893 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54b7f7458b-qqbgx"] Oct 02 21:43:36 crc kubenswrapper[4636]: E1002 21:43:36.401301 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon-log" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.401321 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon-log" Oct 02 21:43:36 crc kubenswrapper[4636]: E1002 21:43:36.401345 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.401353 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.401526 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.401548 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="a20af78e-6d87-4dfc-82a5-e0290aa65ec0" containerName="horizon-log" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.402510 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.406192 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.406528 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.416980 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54b7f7458b-qqbgx"] Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.502791 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-internal-tls-certs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.503281 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59bde702-8c1a-491a-a1f8-0c0862f05d77-logs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.503441 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-combined-ca-bundle\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.503576 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-public-tls-certs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.503656 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-config-data-custom\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.503734 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfdjn\" (UniqueName: \"kubernetes.io/projected/59bde702-8c1a-491a-a1f8-0c0862f05d77-kube-api-access-cfdjn\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.503840 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-config-data\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.531706 4636 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604425 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-config\") pod \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604521 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh42r\" (UniqueName: \"kubernetes.io/projected/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-kube-api-access-gh42r\") pod \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604554 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-combined-ca-bundle\") pod \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\" (UID: \"1b773903-9fdb-4fdd-97b5-1c89103b3a0b\") " Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604765 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-combined-ca-bundle\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604811 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-public-tls-certs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604829 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-config-data-custom\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604844 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfdjn\" (UniqueName: \"kubernetes.io/projected/59bde702-8c1a-491a-a1f8-0c0862f05d77-kube-api-access-cfdjn\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604864 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-config-data\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.604921 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-internal-tls-certs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 
21:43:36.604942 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59bde702-8c1a-491a-a1f8-0c0862f05d77-logs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.605308 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59bde702-8c1a-491a-a1f8-0c0862f05d77-logs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.613691 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-config-data-custom\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.620396 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-public-tls-certs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.625421 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-config-data\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.625920 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-combined-ca-bundle\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.626044 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/59bde702-8c1a-491a-a1f8-0c0862f05d77-internal-tls-certs\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.626135 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-kube-api-access-gh42r" (OuterVolumeSpecName: "kube-api-access-gh42r") pod "1b773903-9fdb-4fdd-97b5-1c89103b3a0b" (UID: "1b773903-9fdb-4fdd-97b5-1c89103b3a0b"). InnerVolumeSpecName "kube-api-access-gh42r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.674328 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b773903-9fdb-4fdd-97b5-1c89103b3a0b" (UID: "1b773903-9fdb-4fdd-97b5-1c89103b3a0b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.675152 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-config" (OuterVolumeSpecName: "config") pod "1b773903-9fdb-4fdd-97b5-1c89103b3a0b" (UID: "1b773903-9fdb-4fdd-97b5-1c89103b3a0b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.675633 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfdjn\" (UniqueName: \"kubernetes.io/projected/59bde702-8c1a-491a-a1f8-0c0862f05d77-kube-api-access-cfdjn\") pod \"barbican-api-54b7f7458b-qqbgx\" (UID: \"59bde702-8c1a-491a-a1f8-0c0862f05d77\") " pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.706442 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.706488 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh42r\" (UniqueName: \"kubernetes.io/projected/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-kube-api-access-gh42r\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.706501 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b773903-9fdb-4fdd-97b5-1c89103b3a0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:36 crc kubenswrapper[4636]: I1002 21:43:36.818194 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.114482 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4vrl8" event={"ID":"1b773903-9fdb-4fdd-97b5-1c89103b3a0b","Type":"ContainerDied","Data":"ced38f16dacf47de76216a6f42b4788a292e8fcbb2fa991fff63d99172a9caae"} Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.114794 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ced38f16dacf47de76216a6f42b4788a292e8fcbb2fa991fff63d99172a9caae" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.114495 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4vrl8" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.123712 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerStarted","Data":"6fd7ff1db052304bfb25ae9eaabb2657ba9c874363cea46c3e0dccec4a308318"} Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.735526 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54b7f7458b-qqbgx"] Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.820709 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-n7bw7"] Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.821202 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerName="dnsmasq-dns" containerID="cri-o://6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b" gracePeriod=10 Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.967937 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-75d644d86d-xqtl4"] Oct 02 21:43:37 crc kubenswrapper[4636]: E1002 21:43:37.968333 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b773903-9fdb-4fdd-97b5-1c89103b3a0b" containerName="neutron-db-sync" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.968350 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b773903-9fdb-4fdd-97b5-1c89103b3a0b" containerName="neutron-db-sync" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.968641 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b773903-9fdb-4fdd-97b5-1c89103b3a0b" containerName="neutron-db-sync" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.969617 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.976580 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.976870 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cwlxm" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.977021 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.977129 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 02 21:43:37 crc kubenswrapper[4636]: I1002 21:43:37.993001 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-xkn2n"] Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.001938 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.043629 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-xkn2n"] Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.047301 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.054850 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nnrc\" (UniqueName: \"kubernetes.io/projected/6e944a52-400a-4253-afad-9c8ab5abd662-kube-api-access-9nnrc\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.054897 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.054937 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-httpd-config\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.054996 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055021 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-ovndb-tls-certs\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055052 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-config\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055066 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-combined-ca-bundle\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055084 4636 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-config\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055108 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055132 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpqpc\" (UniqueName: \"kubernetes.io/projected/faafa825-e4c3-45ed-92f6-1798993353d0-kube-api-access-hpqpc\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.055164 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-svc\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.105837 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-75d644d86d-xqtl4"] Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156232 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-svc\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156290 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nnrc\" (UniqueName: \"kubernetes.io/projected/6e944a52-400a-4253-afad-9c8ab5abd662-kube-api-access-9nnrc\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156309 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156345 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-httpd-config\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156401 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: 
\"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156424 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-ovndb-tls-certs\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156455 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-config\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156473 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-combined-ca-bundle\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156521 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-config\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156544 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.156574 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpqpc\" (UniqueName: \"kubernetes.io/projected/faafa825-e4c3-45ed-92f6-1798993353d0-kube-api-access-hpqpc\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.157702 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-svc\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.158399 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.158629 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-config\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 
21:43:38.159283 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.166578 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-ovndb-tls-certs\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.172073 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.201488 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nnrc\" (UniqueName: \"kubernetes.io/projected/6e944a52-400a-4253-afad-9c8ab5abd662-kube-api-access-9nnrc\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.204166 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" event={"ID":"9140b0c4-6589-4b25-b300-0d4421daca16","Type":"ContainerStarted","Data":"a59f994c79e62e734f178078424d5eaa1fd1450e6f1539c1de48e57a208906f7"} Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.219441 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-config\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.222211 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f9d95566c-lfqlc" event={"ID":"b8eb7caa-f57a-474e-b86a-f85079b23081","Type":"ContainerStarted","Data":"21c2c3571ae86d70ccdcff62079f9b8b29f5d293fe218c178f9fbda4517d7a7f"} Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.226423 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-httpd-config\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.245014 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b7f7458b-qqbgx" event={"ID":"59bde702-8c1a-491a-a1f8-0c0862f05d77","Type":"ContainerStarted","Data":"81256729d913467eed5d1f3883d16437107956884ad2e5673436a2ff02a61c5e"} Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.250109 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpqpc\" (UniqueName: \"kubernetes.io/projected/faafa825-e4c3-45ed-92f6-1798993353d0-kube-api-access-hpqpc\") pod \"dnsmasq-dns-5784cf869f-xkn2n\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " 
pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.296800 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-combined-ca-bundle\") pod \"neutron-75d644d86d-xqtl4\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.341724 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.388567 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:38 crc kubenswrapper[4636]: I1002 21:43:38.951771 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.003504 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-nb\") pod \"d92db512-14f3-4d38-bcbf-f223af634dcd\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.003902 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-svc\") pod \"d92db512-14f3-4d38-bcbf-f223af634dcd\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.003990 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-sb\") pod \"d92db512-14f3-4d38-bcbf-f223af634dcd\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.004120 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-swift-storage-0\") pod \"d92db512-14f3-4d38-bcbf-f223af634dcd\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.004211 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-config\") pod \"d92db512-14f3-4d38-bcbf-f223af634dcd\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.004292 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prjr8\" (UniqueName: \"kubernetes.io/projected/d92db512-14f3-4d38-bcbf-f223af634dcd-kube-api-access-prjr8\") pod \"d92db512-14f3-4d38-bcbf-f223af634dcd\" (UID: \"d92db512-14f3-4d38-bcbf-f223af634dcd\") " Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.050953 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d92db512-14f3-4d38-bcbf-f223af634dcd-kube-api-access-prjr8" (OuterVolumeSpecName: "kube-api-access-prjr8") pod "d92db512-14f3-4d38-bcbf-f223af634dcd" (UID: "d92db512-14f3-4d38-bcbf-f223af634dcd"). InnerVolumeSpecName "kube-api-access-prjr8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.146311 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prjr8\" (UniqueName: \"kubernetes.io/projected/d92db512-14f3-4d38-bcbf-f223af634dcd-kube-api-access-prjr8\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.243867 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-xkn2n"] Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.280262 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d92db512-14f3-4d38-bcbf-f223af634dcd" (UID: "d92db512-14f3-4d38-bcbf-f223af634dcd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.304707 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerStarted","Data":"a1232d5d48cd847857afcb14518868fc759aca0848f2d02522fe374c24eecb6a"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.333017 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f9d95566c-lfqlc" event={"ID":"b8eb7caa-f57a-474e-b86a-f85079b23081","Type":"ContainerStarted","Data":"e34fd2f00e5ecdedd9bddcc1eecb8432fdaa088a3659facc304488103594efd9"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.347693 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b7f7458b-qqbgx" event={"ID":"59bde702-8c1a-491a-a1f8-0c0862f05d77","Type":"ContainerStarted","Data":"b1e41bc18efcd5be0ccb6febcf4ceb86acfa77b55a76818c3e7726ec835cd9e5"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.354954 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.358475 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" event={"ID":"faafa825-e4c3-45ed-92f6-1798993353d0","Type":"ContainerStarted","Data":"25dab710bbcc8edff4c5329682c82be95261822eda3829a6d7389c800db24d11"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.381036 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d92db512-14f3-4d38-bcbf-f223af634dcd" (UID: "d92db512-14f3-4d38-bcbf-f223af634dcd"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.389339 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" event={"ID":"9140b0c4-6589-4b25-b300-0d4421daca16","Type":"ContainerStarted","Data":"af907036a89e745c2754a4457d5c0fae47cf490427e08517c4b5338af9433c06"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.391607 4636 generic.go:334] "Generic (PLEG): container finished" podID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerID="6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b" exitCode=0 Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.391635 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" event={"ID":"d92db512-14f3-4d38-bcbf-f223af634dcd","Type":"ContainerDied","Data":"6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.391652 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" event={"ID":"d92db512-14f3-4d38-bcbf-f223af634dcd","Type":"ContainerDied","Data":"2cc65b7dd100a84087df777f4bde22f5e2057af27d2dcfaafdc78ef1d0751d5e"} Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.391668 4636 scope.go:117] "RemoveContainer" containerID="6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.391792 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-n7bw7" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.401968 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6f9d95566c-lfqlc" podStartSLOduration=4.098839588 podStartE2EDuration="10.401944168s" podCreationTimestamp="2025-10-02 21:43:29 +0000 UTC" firstStartedPulling="2025-10-02 21:43:30.883910723 +0000 UTC m=+1202.206918742" lastFinishedPulling="2025-10-02 21:43:37.187015303 +0000 UTC m=+1208.510023322" observedRunningTime="2025-10-02 21:43:39.398959309 +0000 UTC m=+1210.721967328" watchObservedRunningTime="2025-10-02 21:43:39.401944168 +0000 UTC m=+1210.724952187" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.446283 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-config" (OuterVolumeSpecName: "config") pod "d92db512-14f3-4d38-bcbf-f223af634dcd" (UID: "d92db512-14f3-4d38-bcbf-f223af634dcd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.459663 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.459725 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.471507 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d92db512-14f3-4d38-bcbf-f223af634dcd" (UID: "d92db512-14f3-4d38-bcbf-f223af634dcd"). 
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.472960 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d92db512-14f3-4d38-bcbf-f223af634dcd" (UID: "d92db512-14f3-4d38-bcbf-f223af634dcd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.518087 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-c5fc5f7c4-cvl68" podStartSLOduration=4.436604104 podStartE2EDuration="10.518069478s" podCreationTimestamp="2025-10-02 21:43:29 +0000 UTC" firstStartedPulling="2025-10-02 21:43:31.155407331 +0000 UTC m=+1202.478415340" lastFinishedPulling="2025-10-02 21:43:37.236872695 +0000 UTC m=+1208.559880714" observedRunningTime="2025-10-02 21:43:39.44923892 +0000 UTC m=+1210.772246959" watchObservedRunningTime="2025-10-02 21:43:39.518069478 +0000 UTC m=+1210.841077497" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.562346 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.562624 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92db512-14f3-4d38-bcbf-f223af634dcd-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.636883 4636 scope.go:117] "RemoveContainer" containerID="e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.639130 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-75d644d86d-xqtl4"] Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.717596 4636 scope.go:117] "RemoveContainer" containerID="6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b" Oct 02 21:43:39 crc kubenswrapper[4636]: E1002 21:43:39.721301 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b\": container with ID starting with 6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b not found: ID does not exist" containerID="6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.721350 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b"} err="failed to get container status \"6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b\": rpc error: code = NotFound desc = could not find container \"6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b\": container with ID starting with 6ae0743e572cf89cd36dafeacfcec833bff0d18655556f70e6eccd22392ed03b not found: ID does not exist" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.721369 4636 scope.go:117] "RemoveContainer" containerID="e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e" Oct 02 21:43:39 crc kubenswrapper[4636]: E1002 21:43:39.725050 4636 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e\": container with ID starting with e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e not found: ID does not exist" containerID="e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.725080 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e"} err="failed to get container status \"e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e\": rpc error: code = NotFound desc = could not find container \"e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e\": container with ID starting with e5a17ec2a6fb2cf04c70bb09cc570fe23e40a2e77ac25f0d1caa8b5b7de1221e not found: ID does not exist" Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.761821 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-n7bw7"] Oct 02 21:43:39 crc kubenswrapper[4636]: I1002 21:43:39.771039 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-n7bw7"] Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.241233 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.490143 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b7f7458b-qqbgx" event={"ID":"59bde702-8c1a-491a-a1f8-0c0862f05d77","Type":"ContainerStarted","Data":"dd870912982a5e77271ed7f862bd63ae5a6e28fd3cfe4d3ec7eb83316ed69640"} Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.491929 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.491957 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.506323 4636 generic.go:334] "Generic (PLEG): container finished" podID="faafa825-e4c3-45ed-92f6-1798993353d0" containerID="9f94671a75f47c72a0c6f608da5e48835b8a34910ef2639bfa5cb84c3df401bc" exitCode=0 Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.506403 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" event={"ID":"faafa825-e4c3-45ed-92f6-1798993353d0","Type":"ContainerDied","Data":"9f94671a75f47c72a0c6f608da5e48835b8a34910ef2639bfa5cb84c3df401bc"} Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.542236 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54b7f7458b-qqbgx" podStartSLOduration=4.54219932 podStartE2EDuration="4.54219932s" podCreationTimestamp="2025-10-02 21:43:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:40.539085177 +0000 UTC m=+1211.862093196" watchObservedRunningTime="2025-10-02 21:43:40.54219932 +0000 UTC m=+1211.865207349" Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.552678 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75d644d86d-xqtl4" 
event={"ID":"6e944a52-400a-4253-afad-9c8ab5abd662","Type":"ContainerStarted","Data":"650af3b5000135c170b1d7f4acdfba6ab8650ff85e59e1fef2a428b5847f6329"} Oct 02 21:43:40 crc kubenswrapper[4636]: I1002 21:43:40.552735 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75d644d86d-xqtl4" event={"ID":"6e944a52-400a-4253-afad-9c8ab5abd662","Type":"ContainerStarted","Data":"bc3725dcae8ce3e907f78f883be9d5a2484420a926e8390aedb084ea6346424c"} Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.094736 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-655dcc86f7-zxnj7"] Oct 02 21:43:41 crc kubenswrapper[4636]: E1002 21:43:41.095372 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerName="init" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.095444 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerName="init" Oct 02 21:43:41 crc kubenswrapper[4636]: E1002 21:43:41.095555 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerName="dnsmasq-dns" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.095617 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerName="dnsmasq-dns" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.095892 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" containerName="dnsmasq-dns" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.097102 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.100322 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 02 21:43:41 crc kubenswrapper[4636]: W1002 21:43:41.100544 4636 reflector.go:561] object-"openstack"/"cert-neutron-internal-svc": failed to list *v1.Secret: secrets "cert-neutron-internal-svc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Oct 02 21:43:41 crc kubenswrapper[4636]: E1002 21:43:41.100568 4636 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"cert-neutron-internal-svc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-neutron-internal-svc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.108273 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-655dcc86f7-zxnj7"] Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203268 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-ovndb-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203316 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-public-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203344 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-config\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203373 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-internal-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203395 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-httpd-config\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203411 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlnlh\" (UniqueName: \"kubernetes.io/projected/181f63c0-d749-44d0-8bc8-588a8d1fc12f-kube-api-access-wlnlh\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.203456 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-combined-ca-bundle\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.293899 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.334180 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-ovndb-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.334232 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-public-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.334257 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-config\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 
21:43:41.334280 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-internal-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.334298 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-httpd-config\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.334313 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlnlh\" (UniqueName: \"kubernetes.io/projected/181f63c0-d749-44d0-8bc8-588a8d1fc12f-kube-api-access-wlnlh\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.334357 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-combined-ca-bundle\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.338914 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.349044 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-combined-ca-bundle\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.360261 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.361437 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-public-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.361895 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-config\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.367392 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-ovndb-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc 
kubenswrapper[4636]: I1002 21:43:41.370416 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlnlh\" (UniqueName: \"kubernetes.io/projected/181f63c0-d749-44d0-8bc8-588a8d1fc12f-kube-api-access-wlnlh\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.407090 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-httpd-config\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.561141 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerStarted","Data":"c8a8ac52b0a0e2fe1aef619acd1c938073ca40472ef7eeaecb6ec798fb54f2a1"} Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.563348 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" event={"ID":"faafa825-e4c3-45ed-92f6-1798993353d0","Type":"ContainerStarted","Data":"5d924c48a3e880df5961f2af92c3d0dff8ab6676a0bf2c49198c19d53a3aeb45"} Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.563478 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.565207 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75d644d86d-xqtl4" event={"ID":"6e944a52-400a-4253-afad-9c8ab5abd662","Type":"ContainerStarted","Data":"07fb6f09ed97bc60ae26587a5b976539cf0b8a2adc8ad857173c97f33c24d2b2"} Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.565462 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.565599 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="cinder-scheduler" containerID="cri-o://71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2" gracePeriod=30 Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.565733 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="probe" containerID="cri-o://31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd" gracePeriod=30 Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.590555 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" podStartSLOduration=4.590540069 podStartE2EDuration="4.590540069s" podCreationTimestamp="2025-10-02 21:43:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:41.590537659 +0000 UTC m=+1212.913545668" watchObservedRunningTime="2025-10-02 21:43:41.590540069 +0000 UTC m=+1212.913548088" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.612311 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-75d644d86d-xqtl4" podStartSLOduration=4.6122955900000004 podStartE2EDuration="4.61229559s" 
podCreationTimestamp="2025-10-02 21:43:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:41.609985069 +0000 UTC m=+1212.932993088" watchObservedRunningTime="2025-10-02 21:43:41.61229559 +0000 UTC m=+1212.935303599" Oct 02 21:43:41 crc kubenswrapper[4636]: I1002 21:43:41.614134 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d92db512-14f3-4d38-bcbf-f223af634dcd" path="/var/lib/kubelet/pods/d92db512-14f3-4d38-bcbf-f223af634dcd/volumes" Oct 02 21:43:42 crc kubenswrapper[4636]: I1002 21:43:42.129064 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 02 21:43:42 crc kubenswrapper[4636]: I1002 21:43:42.142501 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/181f63c0-d749-44d0-8bc8-588a8d1fc12f-internal-tls-certs\") pod \"neutron-655dcc86f7-zxnj7\" (UID: \"181f63c0-d749-44d0-8bc8-588a8d1fc12f\") " pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:42 crc kubenswrapper[4636]: I1002 21:43:42.320282 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:42 crc kubenswrapper[4636]: I1002 21:43:42.593834 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerStarted","Data":"39ab4843cb5a40dfebfdd2c67d4a4757702b9103edc67bd8aca9423f6eb05b31"} Oct 02 21:43:42 crc kubenswrapper[4636]: I1002 21:43:42.978599 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6961952829999998 podStartE2EDuration="10.978580748s" podCreationTimestamp="2025-10-02 21:43:32 +0000 UTC" firstStartedPulling="2025-10-02 21:43:33.647626318 +0000 UTC m=+1204.970634337" lastFinishedPulling="2025-10-02 21:43:41.930011783 +0000 UTC m=+1213.253019802" observedRunningTime="2025-10-02 21:43:42.635265962 +0000 UTC m=+1213.958274011" watchObservedRunningTime="2025-10-02 21:43:42.978580748 +0000 UTC m=+1214.301588767" Oct 02 21:43:42 crc kubenswrapper[4636]: I1002 21:43:42.985034 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-655dcc86f7-zxnj7"] Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.374282 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.491672 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.492048 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.492796 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" 
containerStatusID={"Type":"cri-o","ID":"3cd88a149e153dcf509a1fbfa49ed066efedce2a10516a990ddad09a7052ca1e"} pod="openstack/horizon-7646d88f4d-85mgl" containerMessage="Container horizon failed startup probe, will be restarted" Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.492827 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" containerID="cri-o://3cd88a149e153dcf509a1fbfa49ed066efedce2a10516a990ddad09a7052ca1e" gracePeriod=30 Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.615780 4636 generic.go:334] "Generic (PLEG): container finished" podID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerID="31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd" exitCode=0 Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.616346 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655dcc86f7-zxnj7" event={"ID":"181f63c0-d749-44d0-8bc8-588a8d1fc12f","Type":"ContainerStarted","Data":"219ea17bb527ebe31297906dbf3cbce02128e2c5272154c8c09acea14d3fd65a"} Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.616382 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655dcc86f7-zxnj7" event={"ID":"181f63c0-d749-44d0-8bc8-588a8d1fc12f","Type":"ContainerStarted","Data":"49db447fed80815bac551e16f03440e57c61ce6f2e65933f87f482c43025b598"} Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.616392 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c5f19df9-144a-454c-afe1-08fa91b9312d","Type":"ContainerDied","Data":"31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd"} Oct 02 21:43:43 crc kubenswrapper[4636]: I1002 21:43:43.616436 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.143831 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292402 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data-custom\") pod \"c5f19df9-144a-454c-afe1-08fa91b9312d\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292478 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-scripts\") pod \"c5f19df9-144a-454c-afe1-08fa91b9312d\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c5f19df9-144a-454c-afe1-08fa91b9312d-etc-machine-id\") pod \"c5f19df9-144a-454c-afe1-08fa91b9312d\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292648 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data\") pod \"c5f19df9-144a-454c-afe1-08fa91b9312d\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292708 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-combined-ca-bundle\") pod \"c5f19df9-144a-454c-afe1-08fa91b9312d\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292737 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjxv4\" (UniqueName: \"kubernetes.io/projected/c5f19df9-144a-454c-afe1-08fa91b9312d-kube-api-access-mjxv4\") pod \"c5f19df9-144a-454c-afe1-08fa91b9312d\" (UID: \"c5f19df9-144a-454c-afe1-08fa91b9312d\") " Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.292861 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5f19df9-144a-454c-afe1-08fa91b9312d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c5f19df9-144a-454c-afe1-08fa91b9312d" (UID: "c5f19df9-144a-454c-afe1-08fa91b9312d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.293173 4636 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c5f19df9-144a-454c-afe1-08fa91b9312d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.302915 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5f19df9-144a-454c-afe1-08fa91b9312d-kube-api-access-mjxv4" (OuterVolumeSpecName: "kube-api-access-mjxv4") pod "c5f19df9-144a-454c-afe1-08fa91b9312d" (UID: "c5f19df9-144a-454c-afe1-08fa91b9312d"). InnerVolumeSpecName "kube-api-access-mjxv4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.313058 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-scripts" (OuterVolumeSpecName: "scripts") pod "c5f19df9-144a-454c-afe1-08fa91b9312d" (UID: "c5f19df9-144a-454c-afe1-08fa91b9312d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.313215 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c5f19df9-144a-454c-afe1-08fa91b9312d" (UID: "c5f19df9-144a-454c-afe1-08fa91b9312d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.394449 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjxv4\" (UniqueName: \"kubernetes.io/projected/c5f19df9-144a-454c-afe1-08fa91b9312d-kube-api-access-mjxv4\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.394476 4636 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.394485 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.398054 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.444397 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5f19df9-144a-454c-afe1-08fa91b9312d" (UID: "c5f19df9-144a-454c-afe1-08fa91b9312d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.496728 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.503983 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data" (OuterVolumeSpecName: "config-data") pod "c5f19df9-144a-454c-afe1-08fa91b9312d" (UID: "c5f19df9-144a-454c-afe1-08fa91b9312d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.598324 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5f19df9-144a-454c-afe1-08fa91b9312d-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.625777 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655dcc86f7-zxnj7" event={"ID":"181f63c0-d749-44d0-8bc8-588a8d1fc12f","Type":"ContainerStarted","Data":"d07405530f00b5b062aa7b00f60bdd61218837b26eb149910534f640d42b98d3"} Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.625824 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.627084 4636 generic.go:334] "Generic (PLEG): container finished" podID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerID="71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2" exitCode=0 Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.627112 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.627146 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c5f19df9-144a-454c-afe1-08fa91b9312d","Type":"ContainerDied","Data":"71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2"} Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.627188 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c5f19df9-144a-454c-afe1-08fa91b9312d","Type":"ContainerDied","Data":"4e02d0db0800dd2ff56191a069b34d0382b8f85a395360993d266caa1eaa32de"} Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.627208 4636 scope.go:117] "RemoveContainer" containerID="31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.663901 4636 scope.go:117] "RemoveContainer" containerID="71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.704500 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-655dcc86f7-zxnj7" podStartSLOduration=3.704478856 podStartE2EDuration="3.704478856s" podCreationTimestamp="2025-10-02 21:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:44.679648463 +0000 UTC m=+1216.002656482" watchObservedRunningTime="2025-10-02 21:43:44.704478856 +0000 UTC m=+1216.027486875" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.719960 4636 scope.go:117] "RemoveContainer" containerID="31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd" Oct 02 21:43:44 crc kubenswrapper[4636]: E1002 21:43:44.720394 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd\": container with ID starting with 31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd not found: ID does not exist" containerID="31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.720423 4636 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd"} err="failed to get container status \"31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd\": rpc error: code = NotFound desc = could not find container \"31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd\": container with ID starting with 31f79106b1c697513ee35c10d54ba43998867600c09f024b270dfe25b4b0a0bd not found: ID does not exist" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.720443 4636 scope.go:117] "RemoveContainer" containerID="71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2" Oct 02 21:43:44 crc kubenswrapper[4636]: E1002 21:43:44.720882 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2\": container with ID starting with 71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2 not found: ID does not exist" containerID="71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.720899 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2"} err="failed to get container status \"71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2\": rpc error: code = NotFound desc = could not find container \"71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2\": container with ID starting with 71155960f3c190d8990c81ef7ec91ab084f92b80b85485ecec89d532744611e2 not found: ID does not exist" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.773054 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.790411 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.808811 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:44 crc kubenswrapper[4636]: E1002 21:43:44.809322 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="cinder-scheduler" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.809346 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="cinder-scheduler" Oct 02 21:43:44 crc kubenswrapper[4636]: E1002 21:43:44.809366 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="probe" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.809375 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="probe" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.809559 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="cinder-scheduler" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.809591 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" containerName="probe" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.810557 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.818940 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.820017 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.914644 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-config-data\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.914983 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-scripts\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.915064 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.915098 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.915115 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x9gw\" (UniqueName: \"kubernetes.io/projected/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-kube-api-access-8x9gw\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:44 crc kubenswrapper[4636]: I1002 21:43:44.915138 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016391 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x9gw\" (UniqueName: \"kubernetes.io/projected/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-kube-api-access-8x9gw\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016429 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016468 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016530 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-config-data\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016557 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-scripts\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016626 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.016806 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.021421 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.022178 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-config-data\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.022287 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.025523 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-scripts\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.036944 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x9gw\" (UniqueName: \"kubernetes.io/projected/78574741-b4d5-4f6c-a92e-37cf2aeeeaca-kube-api-access-8x9gw\") pod \"cinder-scheduler-0\" (UID: \"78574741-b4d5-4f6c-a92e-37cf2aeeeaca\") " pod="openstack/cinder-scheduler-0" Oct 
02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.154169 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.441114 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.441064 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.568565 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.586961 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-574b88487b-hjf97" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.660725 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5f19df9-144a-454c-afe1-08fa91b9312d" path="/var/lib/kubelet/pods/c5f19df9-144a-454c-afe1-08fa91b9312d/volumes" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.697046 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.161:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:45 crc kubenswrapper[4636]: I1002 21:43:45.787223 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 02 21:43:46 crc kubenswrapper[4636]: I1002 21:43:46.401986 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:46 crc kubenswrapper[4636]: I1002 21:43:46.717982 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78574741-b4d5-4f6c-a92e-37cf2aeeeaca","Type":"ContainerStarted","Data":"789f18b01876031119d8ab6e00f6a15058275149c20db391cc486994204c29a4"} Oct 02 21:43:46 crc kubenswrapper[4636]: I1002 21:43:46.718028 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78574741-b4d5-4f6c-a92e-37cf2aeeeaca","Type":"ContainerStarted","Data":"59a18d8a2edd519d98c837d60eed5c02e5bde7f399531f01a3d710ba49cab571"} Oct 02 21:43:47 crc kubenswrapper[4636]: I1002 21:43:47.728438 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"78574741-b4d5-4f6c-a92e-37cf2aeeeaca","Type":"ContainerStarted","Data":"a646a1856a9be563b7cb29045fff9a1e8152a64b1509f435264e9a895377544b"} Oct 02 21:43:47 crc kubenswrapper[4636]: I1002 21:43:47.730869 4636 generic.go:334] "Generic (PLEG): container finished" 
podID="65063729-cda3-488f-8e94-364db15e2d2d" containerID="3cd88a149e153dcf509a1fbfa49ed066efedce2a10516a990ddad09a7052ca1e" exitCode=0 Oct 02 21:43:47 crc kubenswrapper[4636]: I1002 21:43:47.730898 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerDied","Data":"3cd88a149e153dcf509a1fbfa49ed066efedce2a10516a990ddad09a7052ca1e"} Oct 02 21:43:47 crc kubenswrapper[4636]: I1002 21:43:47.730916 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerStarted","Data":"0d50d981993a8c7a14f5be874fc82c71a331452cefb9991b2bc42383c51602c1"} Oct 02 21:43:47 crc kubenswrapper[4636]: I1002 21:43:47.751557 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.751536566 podStartE2EDuration="3.751536566s" podCreationTimestamp="2025-10-02 21:43:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:43:47.74493979 +0000 UTC m=+1219.067947819" watchObservedRunningTime="2025-10-02 21:43:47.751536566 +0000 UTC m=+1219.074544585" Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.401949 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.475697 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.475814 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.558894 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-v2w52"] Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.559427 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerName="dnsmasq-dns" containerID="cri-o://0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7" gracePeriod=10 Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.740537 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.841005 4636 generic.go:334] "Generic (PLEG): container finished" podID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerID="0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7" exitCode=0 Oct 02 21:43:48 crc kubenswrapper[4636]: I1002 21:43:48.841494 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" event={"ID":"f6ae6eac-c5ec-4476-83e2-2b5532974b55","Type":"ContainerDied","Data":"0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7"} Oct 02 21:43:48 crc kubenswrapper[4636]: E1002 21:43:48.981255 4636 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6ae6eac_c5ec_4476_83e2_2b5532974b55.slice/crio-0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6ae6eac_c5ec_4476_83e2_2b5532974b55.slice/crio-conmon-0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7.scope\": RecentStats: unable to find data in memory cache]" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.377943 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.445898 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.507595 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-nb\") pod \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.507648 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-config\") pod \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.507785 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-swift-storage-0\") pod \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.507858 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4bwh\" (UniqueName: \"kubernetes.io/projected/f6ae6eac-c5ec-4476-83e2-2b5532974b55-kube-api-access-v4bwh\") pod \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.507893 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-sb\") pod \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.507942 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-svc\") pod \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\" (UID: \"f6ae6eac-c5ec-4476-83e2-2b5532974b55\") " Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.548229 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6ae6eac-c5ec-4476-83e2-2b5532974b55-kube-api-access-v4bwh" (OuterVolumeSpecName: "kube-api-access-v4bwh") pod "f6ae6eac-c5ec-4476-83e2-2b5532974b55" (UID: "f6ae6eac-c5ec-4476-83e2-2b5532974b55"). InnerVolumeSpecName "kube-api-access-v4bwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.632209 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4bwh\" (UniqueName: \"kubernetes.io/projected/f6ae6eac-c5ec-4476-83e2-2b5532974b55-kube-api-access-v4bwh\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.641081 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f6ae6eac-c5ec-4476-83e2-2b5532974b55" (UID: "f6ae6eac-c5ec-4476-83e2-2b5532974b55"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.660869 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f6ae6eac-c5ec-4476-83e2-2b5532974b55" (UID: "f6ae6eac-c5ec-4476-83e2-2b5532974b55"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.665445 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f6ae6eac-c5ec-4476-83e2-2b5532974b55" (UID: "f6ae6eac-c5ec-4476-83e2-2b5532974b55"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.691675 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f6ae6eac-c5ec-4476-83e2-2b5532974b55" (UID: "f6ae6eac-c5ec-4476-83e2-2b5532974b55"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.719221 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-config" (OuterVolumeSpecName: "config") pod "f6ae6eac-c5ec-4476-83e2-2b5532974b55" (UID: "f6ae6eac-c5ec-4476-83e2-2b5532974b55"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.736806 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.736834 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.736843 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.736949 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.737013 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6ae6eac-c5ec-4476-83e2-2b5532974b55-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.870359 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.871741 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-v2w52" event={"ID":"f6ae6eac-c5ec-4476-83e2-2b5532974b55","Type":"ContainerDied","Data":"dd814149456aebd4569e0f73f8add357738fa79ef3ffde3d7e91924788f39791"} Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.871813 4636 scope.go:117] "RemoveContainer" containerID="0f268d7ec6f5089d2a12d89ecfdc5526dfcf9a1d9eac05067e5b45bd12e93de7" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.920206 4636 scope.go:117] "RemoveContainer" containerID="ca64e98dab0a8d19754187dd518e7ea15863850246dee65c260d9cb4ce8aafd1" Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.925220 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-v2w52"] Oct 02 21:43:49 crc kubenswrapper[4636]: I1002 21:43:49.931764 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-v2w52"] Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.154309 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.523985 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.524045 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:50 crc 
kubenswrapper[4636]: I1002 21:43:50.544152 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.545768 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.622783 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-767589cc85-t7ltn" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.743442 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.161:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.843908 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-54b7f7458b-qqbgx" podUID="59bde702-8c1a-491a-a1f8-0c0862f05d77" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:50 crc kubenswrapper[4636]: I1002 21:43:50.844378 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-54b7f7458b-qqbgx" podUID="59bde702-8c1a-491a-a1f8-0c0862f05d77" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:51 crc kubenswrapper[4636]: I1002 21:43:51.613413 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" path="/var/lib/kubelet/pods/f6ae6eac-c5ec-4476-83e2-2b5532974b55/volumes" Oct 02 21:43:51 crc kubenswrapper[4636]: I1002 21:43:51.864956 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54b7f7458b-qqbgx" podUID="59bde702-8c1a-491a-a1f8-0c0862f05d77" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:51 crc kubenswrapper[4636]: I1002 21:43:51.865042 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54b7f7458b-qqbgx" podUID="59bde702-8c1a-491a-a1f8-0c0862f05d77" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:43:54 crc kubenswrapper[4636]: I1002 21:43:54.159139 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:54 crc kubenswrapper[4636]: I1002 21:43:54.495872 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 02 21:43:54 crc kubenswrapper[4636]: I1002 21:43:54.874600 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54b7f7458b-qqbgx" Oct 02 21:43:54 crc kubenswrapper[4636]: I1002 21:43:54.926888 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7888867b8d-8s7qr"] Oct 02 21:43:54 crc kubenswrapper[4636]: I1002 21:43:54.927129 4636 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" containerID="cri-o://92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2" gracePeriod=30 Oct 02 21:43:54 crc kubenswrapper[4636]: I1002 21:43:54.927177 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" containerID="cri-o://ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36" gracePeriod=30 Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.428503 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.553447 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 02 21:43:55 crc kubenswrapper[4636]: E1002 21:43:55.553843 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerName="init" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.553858 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerName="init" Oct 02 21:43:55 crc kubenswrapper[4636]: E1002 21:43:55.553884 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerName="dnsmasq-dns" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.553890 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerName="dnsmasq-dns" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.554057 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6ae6eac-c5ec-4476-83e2-2b5532974b55" containerName="dnsmasq-dns" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.554654 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.562037 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.562306 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-c7d64" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.562905 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.568148 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.578666 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f415d8af-0c69-42dc-b3d5-bb5cfa456768-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.578698 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f415d8af-0c69-42dc-b3d5-bb5cfa456768-openstack-config-secret\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.578739 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f415d8af-0c69-42dc-b3d5-bb5cfa456768-openstack-config\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.578865 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx2k8\" (UniqueName: \"kubernetes.io/projected/f415d8af-0c69-42dc-b3d5-bb5cfa456768-kube-api-access-jx2k8\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.681016 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f415d8af-0c69-42dc-b3d5-bb5cfa456768-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.681301 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f415d8af-0c69-42dc-b3d5-bb5cfa456768-openstack-config-secret\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.681406 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f415d8af-0c69-42dc-b3d5-bb5cfa456768-openstack-config\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.681495 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-jx2k8\" (UniqueName: \"kubernetes.io/projected/f415d8af-0c69-42dc-b3d5-bb5cfa456768-kube-api-access-jx2k8\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.686419 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f415d8af-0c69-42dc-b3d5-bb5cfa456768-openstack-config\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.687336 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f415d8af-0c69-42dc-b3d5-bb5cfa456768-openstack-config-secret\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.702275 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f415d8af-0c69-42dc-b3d5-bb5cfa456768-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.702458 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx2k8\" (UniqueName: \"kubernetes.io/projected/f415d8af-0c69-42dc-b3d5-bb5cfa456768-kube-api-access-jx2k8\") pod \"openstackclient\" (UID: \"f415d8af-0c69-42dc-b3d5-bb5cfa456768\") " pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.876340 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.990601 4636 generic.go:334] "Generic (PLEG): container finished" podID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerID="92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2" exitCode=143 Oct 02 21:43:55 crc kubenswrapper[4636]: I1002 21:43:55.990637 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7888867b8d-8s7qr" event={"ID":"008c551b-ebc5-476f-8445-5c976ca7ce57","Type":"ContainerDied","Data":"92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2"} Oct 02 21:43:56 crc kubenswrapper[4636]: W1002 21:43:56.526782 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf415d8af_0c69_42dc_b3d5_bb5cfa456768.slice/crio-1ddef22e5347b4497e60acee8344e45d3e33e584486e4908087ecdb9a51d6fa6 WatchSource:0}: Error finding container 1ddef22e5347b4497e60acee8344e45d3e33e584486e4908087ecdb9a51d6fa6: Status 404 returned error can't find the container with id 1ddef22e5347b4497e60acee8344e45d3e33e584486e4908087ecdb9a51d6fa6 Oct 02 21:43:56 crc kubenswrapper[4636]: I1002 21:43:56.529215 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.002680 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f415d8af-0c69-42dc-b3d5-bb5cfa456768","Type":"ContainerStarted","Data":"1ddef22e5347b4497e60acee8344e45d3e33e584486e4908087ecdb9a51d6fa6"} Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.734633 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6cfb778d8c-pmqb5"] Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.737048 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.743301 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.744066 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6cfb778d8c-pmqb5"] Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.744208 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.744397 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831502 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-config-data\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831586 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-public-tls-certs\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831613 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4bc228bc-6368-438e-a574-aa4c80d81dc6-etc-swift\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831665 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4bc228bc-6368-438e-a574-aa4c80d81dc6-log-httpd\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831684 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4bc228bc-6368-438e-a574-aa4c80d81dc6-run-httpd\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831706 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz9l9\" (UniqueName: \"kubernetes.io/projected/4bc228bc-6368-438e-a574-aa4c80d81dc6-kube-api-access-tz9l9\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831727 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-combined-ca-bundle\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " 
pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.831743 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-internal-tls-certs\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.933951 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz9l9\" (UniqueName: \"kubernetes.io/projected/4bc228bc-6368-438e-a574-aa4c80d81dc6-kube-api-access-tz9l9\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934005 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-combined-ca-bundle\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934024 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-internal-tls-certs\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934101 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-config-data\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934159 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-public-tls-certs\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934178 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4bc228bc-6368-438e-a574-aa4c80d81dc6-etc-swift\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934205 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4bc228bc-6368-438e-a574-aa4c80d81dc6-log-httpd\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934222 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4bc228bc-6368-438e-a574-aa4c80d81dc6-run-httpd\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " 
pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934890 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4bc228bc-6368-438e-a574-aa4c80d81dc6-run-httpd\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.934922 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4bc228bc-6368-438e-a574-aa4c80d81dc6-log-httpd\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.942074 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4bc228bc-6368-438e-a574-aa4c80d81dc6-etc-swift\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.944406 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-internal-tls-certs\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.945314 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-public-tls-certs\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.945407 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-config-data\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.958147 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bc228bc-6368-438e-a574-aa4c80d81dc6-combined-ca-bundle\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:57 crc kubenswrapper[4636]: I1002 21:43:57.969515 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz9l9\" (UniqueName: \"kubernetes.io/projected/4bc228bc-6368-438e-a574-aa4c80d81dc6-kube-api-access-tz9l9\") pod \"swift-proxy-6cfb778d8c-pmqb5\" (UID: \"4bc228bc-6368-438e-a574-aa4c80d81dc6\") " pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.071722 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.476223 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.504927 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:55970->10.217.0.158:9311: read: connection reset by peer" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.504973 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7888867b8d-8s7qr" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:55972->10.217.0.158:9311: read: connection reset by peer" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.822440 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6cfb778d8c-pmqb5"] Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.902334 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.989894 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/008c551b-ebc5-476f-8445-5c976ca7ce57-logs\") pod \"008c551b-ebc5-476f-8445-5c976ca7ce57\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.990002 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data\") pod \"008c551b-ebc5-476f-8445-5c976ca7ce57\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.990093 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr22c\" (UniqueName: \"kubernetes.io/projected/008c551b-ebc5-476f-8445-5c976ca7ce57-kube-api-access-dr22c\") pod \"008c551b-ebc5-476f-8445-5c976ca7ce57\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.990344 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data-custom\") pod \"008c551b-ebc5-476f-8445-5c976ca7ce57\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.990369 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-combined-ca-bundle\") pod \"008c551b-ebc5-476f-8445-5c976ca7ce57\" (UID: \"008c551b-ebc5-476f-8445-5c976ca7ce57\") " Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.990431 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/008c551b-ebc5-476f-8445-5c976ca7ce57-logs" (OuterVolumeSpecName: "logs") pod "008c551b-ebc5-476f-8445-5c976ca7ce57" (UID: "008c551b-ebc5-476f-8445-5c976ca7ce57"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:43:58 crc kubenswrapper[4636]: I1002 21:43:58.991115 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/008c551b-ebc5-476f-8445-5c976ca7ce57-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:58.993510 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/008c551b-ebc5-476f-8445-5c976ca7ce57-kube-api-access-dr22c" (OuterVolumeSpecName: "kube-api-access-dr22c") pod "008c551b-ebc5-476f-8445-5c976ca7ce57" (UID: "008c551b-ebc5-476f-8445-5c976ca7ce57"). InnerVolumeSpecName "kube-api-access-dr22c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.003679 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "008c551b-ebc5-476f-8445-5c976ca7ce57" (UID: "008c551b-ebc5-476f-8445-5c976ca7ce57"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.065542 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" event={"ID":"4bc228bc-6368-438e-a574-aa4c80d81dc6","Type":"ContainerStarted","Data":"d349d605944d9d3e3a1e9ec3c7a5479bc9f9d70b0fa13e34b70a0ea587aa26b0"} Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.069893 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "008c551b-ebc5-476f-8445-5c976ca7ce57" (UID: "008c551b-ebc5-476f-8445-5c976ca7ce57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.072611 4636 generic.go:334] "Generic (PLEG): container finished" podID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerID="ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36" exitCode=0 Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.072648 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7888867b8d-8s7qr" event={"ID":"008c551b-ebc5-476f-8445-5c976ca7ce57","Type":"ContainerDied","Data":"ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36"} Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.072671 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7888867b8d-8s7qr" event={"ID":"008c551b-ebc5-476f-8445-5c976ca7ce57","Type":"ContainerDied","Data":"d449d8715941effba59826738247c29837dd74111fd98f6b2e57559e9765d36b"} Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.072686 4636 scope.go:117] "RemoveContainer" containerID="ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.072828 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7888867b8d-8s7qr" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.096863 4636 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.102591 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.102654 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr22c\" (UniqueName: \"kubernetes.io/projected/008c551b-ebc5-476f-8445-5c976ca7ce57-kube-api-access-dr22c\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.113872 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data" (OuterVolumeSpecName: "config-data") pod "008c551b-ebc5-476f-8445-5c976ca7ce57" (UID: "008c551b-ebc5-476f-8445-5c976ca7ce57"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.203670 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/008c551b-ebc5-476f-8445-5c976ca7ce57-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.230458 4636 scope.go:117] "RemoveContainer" containerID="92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.273647 4636 scope.go:117] "RemoveContainer" containerID="ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36" Oct 02 21:43:59 crc kubenswrapper[4636]: E1002 21:43:59.275183 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36\": container with ID starting with ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36 not found: ID does not exist" containerID="ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.275219 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36"} err="failed to get container status \"ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36\": rpc error: code = NotFound desc = could not find container \"ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36\": container with ID starting with ffc037fe6e104822d298c3a8d4b07a9cb00d2d39ce5eb7a5799a39bab98c3b36 not found: ID does not exist" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.275240 4636 scope.go:117] "RemoveContainer" containerID="92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2" Oct 02 21:43:59 crc kubenswrapper[4636]: E1002 21:43:59.275856 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2\": container with ID starting with 92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2 not found: ID does 
not exist" containerID="92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.275881 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2"} err="failed to get container status \"92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2\": rpc error: code = NotFound desc = could not find container \"92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2\": container with ID starting with 92ac3e146a7347a4c6bf10333fa868d2ccac7f1bba066646f7d63834359bf1b2 not found: ID does not exist" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.306376 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.306649 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-central-agent" containerID="cri-o://6fd7ff1db052304bfb25ae9eaabb2657ba9c874363cea46c3e0dccec4a308318" gracePeriod=30 Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.309077 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="proxy-httpd" containerID="cri-o://39ab4843cb5a40dfebfdd2c67d4a4757702b9103edc67bd8aca9423f6eb05b31" gracePeriod=30 Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.309227 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-notification-agent" containerID="cri-o://a1232d5d48cd847857afcb14518868fc759aca0848f2d02522fe374c24eecb6a" gracePeriod=30 Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.309284 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="sg-core" containerID="cri-o://c8a8ac52b0a0e2fe1aef619acd1c938073ca40472ef7eeaecb6ec798fb54f2a1" gracePeriod=30 Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.329083 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.413290 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7888867b8d-8s7qr"] Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.420223 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7888867b8d-8s7qr"] Oct 02 21:43:59 crc kubenswrapper[4636]: I1002 21:43:59.620879 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" path="/var/lib/kubelet/pods/008c551b-ebc5-476f-8445-5c976ca7ce57/volumes" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.086830 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" event={"ID":"4bc228bc-6368-438e-a574-aa4c80d81dc6","Type":"ContainerStarted","Data":"5fcc79ec5fd0f3ff23541cc50150f9b1613d87683eadd898df15b50fba007e7e"} Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.087966 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.088052 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" event={"ID":"4bc228bc-6368-438e-a574-aa4c80d81dc6","Type":"ContainerStarted","Data":"1d4875f65024a228aaa4d1d964cb351cc5b12d8f8c0a84318ac68b88337660b2"} Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.088116 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107130 4636 generic.go:334] "Generic (PLEG): container finished" podID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerID="39ab4843cb5a40dfebfdd2c67d4a4757702b9103edc67bd8aca9423f6eb05b31" exitCode=0 Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107380 4636 generic.go:334] "Generic (PLEG): container finished" podID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerID="c8a8ac52b0a0e2fe1aef619acd1c938073ca40472ef7eeaecb6ec798fb54f2a1" exitCode=2 Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107457 4636 generic.go:334] "Generic (PLEG): container finished" podID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerID="a1232d5d48cd847857afcb14518868fc759aca0848f2d02522fe374c24eecb6a" exitCode=0 Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107512 4636 generic.go:334] "Generic (PLEG): container finished" podID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerID="6fd7ff1db052304bfb25ae9eaabb2657ba9c874363cea46c3e0dccec4a308318" exitCode=0 Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107335 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerDied","Data":"39ab4843cb5a40dfebfdd2c67d4a4757702b9103edc67bd8aca9423f6eb05b31"} Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107669 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerDied","Data":"c8a8ac52b0a0e2fe1aef619acd1c938073ca40472ef7eeaecb6ec798fb54f2a1"} Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107737 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerDied","Data":"a1232d5d48cd847857afcb14518868fc759aca0848f2d02522fe374c24eecb6a"} Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.107812 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerDied","Data":"6fd7ff1db052304bfb25ae9eaabb2657ba9c874363cea46c3e0dccec4a308318"} Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.117762 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" podStartSLOduration=3.117731128 podStartE2EDuration="3.117731128s" podCreationTimestamp="2025-10-02 21:43:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:44:00.115483329 +0000 UTC m=+1231.438491348" watchObservedRunningTime="2025-10-02 21:44:00.117731128 +0000 UTC m=+1231.440739147" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.466153 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.540981 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-config-data\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.541742 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-log-httpd\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.541837 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-494wb\" (UniqueName: \"kubernetes.io/projected/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-kube-api-access-494wb\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.541880 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-scripts\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.541929 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-sg-core-conf-yaml\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.541947 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-run-httpd\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.541999 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-combined-ca-bundle\") pod \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\" (UID: \"2efc87c6-ceca-46c0-a8ef-c24a44ae735d\") " Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.542311 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.542787 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.543047 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.543064 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.549958 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-scripts" (OuterVolumeSpecName: "scripts") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.550354 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-kube-api-access-494wb" (OuterVolumeSpecName: "kube-api-access-494wb") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "kube-api-access-494wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.614003 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.645005 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-494wb\" (UniqueName: \"kubernetes.io/projected/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-kube-api-access-494wb\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.645036 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.645046 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.668588 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.711563 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-config-data" (OuterVolumeSpecName: "config-data") pod "2efc87c6-ceca-46c0-a8ef-c24a44ae735d" (UID: "2efc87c6-ceca-46c0-a8ef-c24a44ae735d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.746997 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:00 crc kubenswrapper[4636]: I1002 21:44:00.747035 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efc87c6-ceca-46c0-a8ef-c24a44ae735d-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.128219 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.129108 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efc87c6-ceca-46c0-a8ef-c24a44ae735d","Type":"ContainerDied","Data":"4cc3fb7290b22e6c9acb7150da97dbfc2ad5cbd693b66e27e61c935e5726085d"} Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.129144 4636 scope.go:117] "RemoveContainer" containerID="39ab4843cb5a40dfebfdd2c67d4a4757702b9103edc67bd8aca9423f6eb05b31" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.185206 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.192312 4636 scope.go:117] "RemoveContainer" containerID="c8a8ac52b0a0e2fe1aef619acd1c938073ca40472ef7eeaecb6ec798fb54f2a1" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.205724 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.219351 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:01 crc kubenswrapper[4636]: E1002 21:44:01.220028 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.220115 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" Oct 02 21:44:01 crc kubenswrapper[4636]: E1002 21:44:01.220179 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="sg-core" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.220238 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="sg-core" Oct 02 21:44:01 crc kubenswrapper[4636]: E1002 21:44:01.220340 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-notification-agent" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.220412 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-notification-agent" Oct 02 21:44:01 crc kubenswrapper[4636]: E1002 21:44:01.220499 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.220654 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" Oct 02 21:44:01 crc kubenswrapper[4636]: E1002 21:44:01.220813 4636 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="proxy-httpd" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.220951 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="proxy-httpd" Oct 02 21:44:01 crc kubenswrapper[4636]: E1002 21:44:01.221040 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-central-agent" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.221104 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-central-agent" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.228560 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="proxy-httpd" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.228636 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.228646 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="008c551b-ebc5-476f-8445-5c976ca7ce57" containerName="barbican-api-log" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.228682 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-central-agent" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.228700 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="sg-core" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.228715 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" containerName="ceilometer-notification-agent" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.230826 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.234442 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.238682 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.241777 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.253014 4636 scope.go:117] "RemoveContainer" containerID="a1232d5d48cd847857afcb14518868fc759aca0848f2d02522fe374c24eecb6a" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.294873 4636 scope.go:117] "RemoveContainer" containerID="6fd7ff1db052304bfb25ae9eaabb2657ba9c874363cea46c3e0dccec4a308318" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303143 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-log-httpd\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303193 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-run-httpd\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303216 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hwdk\" (UniqueName: \"kubernetes.io/projected/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-kube-api-access-6hwdk\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303248 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303264 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-scripts\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303352 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-config-data\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.303395 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 
21:44:01.405772 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.405828 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-scripts\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.405913 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-config-data\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.405979 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.406073 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-log-httpd\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.406110 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-run-httpd\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.406135 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hwdk\" (UniqueName: \"kubernetes.io/projected/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-kube-api-access-6hwdk\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.407112 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-log-httpd\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.407185 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-run-httpd\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.410199 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.410649 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-scripts\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.410730 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.412087 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-config-data\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.430309 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hwdk\" (UniqueName: \"kubernetes.io/projected/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-kube-api-access-6hwdk\") pod \"ceilometer-0\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.556545 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:01 crc kubenswrapper[4636]: I1002 21:44:01.615447 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2efc87c6-ceca-46c0-a8ef-c24a44ae735d" path="/var/lib/kubelet/pods/2efc87c6-ceca-46c0-a8ef-c24a44ae735d/volumes" Oct 02 21:44:02 crc kubenswrapper[4636]: I1002 21:44:02.237013 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:03 crc kubenswrapper[4636]: I1002 21:44:03.166085 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerStarted","Data":"79a86e16c0980c6041172ed51c640852c921f0897e8b5d8a6fe3a6b0b50187ed"} Oct 02 21:44:03 crc kubenswrapper[4636]: I1002 21:44:03.166408 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerStarted","Data":"96ce1c7f6f857087514487458d46997b64bc6c9ba1ddee7a7c0b78ee8dac2824"} Oct 02 21:44:04 crc kubenswrapper[4636]: I1002 21:44:04.216060 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerStarted","Data":"c51734d7e55345c5aa2fbfa7460fdbae42f6f30967460a58671d4d3575407088"} Oct 02 21:44:05 crc kubenswrapper[4636]: I1002 21:44:05.233237 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerStarted","Data":"e183b3cc16f73e179843fcd9515a558008d7684e1f414df7747234409e6179e4"} Oct 02 21:44:05 crc kubenswrapper[4636]: I1002 21:44:05.245404 4636 generic.go:334] "Generic (PLEG): container finished" podID="9c24327e-435e-47c7-9547-904524571570" containerID="2f92401e7a0a1d8862442efa91362be0515ccdffaf59deb392712f609a84f32d" exitCode=137 Oct 02 21:44:05 crc kubenswrapper[4636]: I1002 21:44:05.245456 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"9c24327e-435e-47c7-9547-904524571570","Type":"ContainerDied","Data":"2f92401e7a0a1d8862442efa91362be0515ccdffaf59deb392712f609a84f32d"} Oct 02 21:44:05 crc kubenswrapper[4636]: I1002 21:44:05.649273 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.161:8776/healthcheck\": dial tcp 10.217.0.161:8776: connect: connection refused" Oct 02 21:44:06 crc kubenswrapper[4636]: I1002 21:44:06.427408 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:08 crc kubenswrapper[4636]: I1002 21:44:08.083614 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:44:08 crc kubenswrapper[4636]: I1002 21:44:08.097804 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" Oct 02 21:44:08 crc kubenswrapper[4636]: I1002 21:44:08.374116 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:44:08 crc kubenswrapper[4636]: I1002 21:44:08.476535 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 02 21:44:10 crc kubenswrapper[4636]: I1002 21:44:10.649596 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.161:8776/healthcheck\": dial tcp 10.217.0.161:8776: connect: connection refused" Oct 02 21:44:12 crc kubenswrapper[4636]: I1002 21:44:12.363674 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-655dcc86f7-zxnj7" Oct 02 21:44:12 crc kubenswrapper[4636]: I1002 21:44:12.439804 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-75d644d86d-xqtl4"] Oct 02 21:44:12 crc kubenswrapper[4636]: I1002 21:44:12.440040 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-75d644d86d-xqtl4" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-api" containerID="cri-o://650af3b5000135c170b1d7f4acdfba6ab8650ff85e59e1fef2a428b5847f6329" gracePeriod=30 Oct 02 21:44:12 crc kubenswrapper[4636]: I1002 21:44:12.440296 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-75d644d86d-xqtl4" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-httpd" containerID="cri-o://07fb6f09ed97bc60ae26587a5b976539cf0b8a2adc8ad857173c97f33c24d2b2" gracePeriod=30 Oct 02 21:44:13 crc kubenswrapper[4636]: I1002 21:44:13.362023 4636 generic.go:334] "Generic (PLEG): container finished" podID="6e944a52-400a-4253-afad-9c8ab5abd662" containerID="07fb6f09ed97bc60ae26587a5b976539cf0b8a2adc8ad857173c97f33c24d2b2" exitCode=0 Oct 02 21:44:13 crc kubenswrapper[4636]: I1002 21:44:13.362064 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75d644d86d-xqtl4" event={"ID":"6e944a52-400a-4253-afad-9c8ab5abd662","Type":"ContainerDied","Data":"07fb6f09ed97bc60ae26587a5b976539cf0b8a2adc8ad857173c97f33c24d2b2"} 
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.471667 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574529 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data-custom\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574593 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-scripts\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574655 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574686 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcf6x\" (UniqueName: \"kubernetes.io/projected/9c24327e-435e-47c7-9547-904524571570-kube-api-access-wcf6x\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574807 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-combined-ca-bundle\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574880 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c24327e-435e-47c7-9547-904524571570-etc-machine-id\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.574927 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c24327e-435e-47c7-9547-904524571570-logs\") pod \"9c24327e-435e-47c7-9547-904524571570\" (UID: \"9c24327e-435e-47c7-9547-904524571570\") "
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.575809 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c24327e-435e-47c7-9547-904524571570-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.576255 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c24327e-435e-47c7-9547-904524571570-logs" (OuterVolumeSpecName: "logs") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.576831 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c24327e-435e-47c7-9547-904524571570-logs\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.576845 4636 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c24327e-435e-47c7-9547-904524571570-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.580233 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c24327e-435e-47c7-9547-904524571570-kube-api-access-wcf6x" (OuterVolumeSpecName: "kube-api-access-wcf6x") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "kube-api-access-wcf6x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.580264 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.590175 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-scripts" (OuterVolumeSpecName: "scripts") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.625091 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.647719 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data" (OuterVolumeSpecName: "config-data") pod "9c24327e-435e-47c7-9547-904524571570" (UID: "9c24327e-435e-47c7-9547-904524571570"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.677941 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.677980 4636 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.677990 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-scripts\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.677999 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c24327e-435e-47c7-9547-904524571570-config-data\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:14 crc kubenswrapper[4636]: I1002 21:44:14.678008 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcf6x\" (UniqueName: \"kubernetes.io/projected/9c24327e-435e-47c7-9547-904524571570-kube-api-access-wcf6x\") on node \"crc\" DevicePath \"\""
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.398802 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerStarted","Data":"c64f7325650e1e35ef9db022db186455a1a89fe816221d77d33d96f459913c65"}
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.399413 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.399122 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="sg-core" containerID="cri-o://e183b3cc16f73e179843fcd9515a558008d7684e1f414df7747234409e6179e4" gracePeriod=30
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.399126 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-notification-agent" containerID="cri-o://c51734d7e55345c5aa2fbfa7460fdbae42f6f30967460a58671d4d3575407088" gracePeriod=30
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.399140 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="proxy-httpd" containerID="cri-o://c64f7325650e1e35ef9db022db186455a1a89fe816221d77d33d96f459913c65" gracePeriod=30
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.398934 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-central-agent" containerID="cri-o://79a86e16c0980c6041172ed51c640852c921f0897e8b5d8a6fe3a6b0b50187ed" gracePeriod=30
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.405666 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f415d8af-0c69-42dc-b3d5-bb5cfa456768","Type":"ContainerStarted","Data":"710215b807b762d7b94cabf8befb42fdd86659457cabbab1b08853396f1ea975"}
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.409258 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c24327e-435e-47c7-9547-904524571570","Type":"ContainerDied","Data":"26db4c41f3d666e1097fa5313000ddd0d62ef46a50b55d2cd5aaa95e527cb682"}
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.409307 4636 scope.go:117] "RemoveContainer" containerID="2f92401e7a0a1d8862442efa91362be0515ccdffaf59deb392712f609a84f32d"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.409342 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.423953 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.52430155 podStartE2EDuration="14.423935449s" podCreationTimestamp="2025-10-02 21:44:01 +0000 UTC" firstStartedPulling="2025-10-02 21:44:02.265416734 +0000 UTC m=+1233.588424743" lastFinishedPulling="2025-10-02 21:44:14.165050633 +0000 UTC m=+1245.488058642" observedRunningTime="2025-10-02 21:44:15.419242236 +0000 UTC m=+1246.742250265" watchObservedRunningTime="2025-10-02 21:44:15.423935449 +0000 UTC m=+1246.746943468"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.449276 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.778558839 podStartE2EDuration="20.449261526s" podCreationTimestamp="2025-10-02 21:43:55 +0000 UTC" firstStartedPulling="2025-10-02 21:43:56.528458124 +0000 UTC m=+1227.851466143" lastFinishedPulling="2025-10-02 21:44:14.199160811 +0000 UTC m=+1245.522168830" observedRunningTime="2025-10-02 21:44:15.447292894 +0000 UTC m=+1246.770300913" watchObservedRunningTime="2025-10-02 21:44:15.449261526 +0000 UTC m=+1246.772269545"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.450225 4636 scope.go:117] "RemoveContainer" containerID="a5539000836254d6a2cb5ac9a68120dab231e2fd56b9504b7473559513fbeefe"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.469294 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.475924 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.503002 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Oct 02 21:44:15 crc kubenswrapper[4636]: E1002 21:44:15.503374 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api-log"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.503385 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api-log"
Oct 02 21:44:15 crc kubenswrapper[4636]: E1002 21:44:15.503401 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.503407 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.503595 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.503609 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c24327e-435e-47c7-9547-904524571570" containerName="cinder-api-log"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.504488 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.506709 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.506916 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.526450 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.531784 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.592369 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-config-data\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.592798 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.592927 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-config-data-custom\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.593043 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987f22a6-6842-4d5c-ac32-ef7698f66ed1-logs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.593141 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.593245 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-scripts\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.593356 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0"
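The "Observed pod startup duration" entries above encode a simple relationship: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). A minimal sketch in plain Go (no kubelet imports) that reproduces the ceilometer-0 numbers, with all timestamps copied verbatim from the log:

```go
// Sketch only: reproduces the kubelet's logged startup numbers for
// ceilometer-0 from the entries above. The timestamp layout matches the
// Go time.Time String() format used in the log.
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func must(t time.Time, err error) time.Time {
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := must(time.Parse(layout, "2025-10-02 21:44:01 +0000 UTC"))             // podCreationTimestamp
	firstPull := must(time.Parse(layout, "2025-10-02 21:44:02.265416734 +0000 UTC")) // firstStartedPulling
	lastPull := must(time.Parse(layout, "2025-10-02 21:44:14.165050633 +0000 UTC"))  // lastFinishedPulling
	running := must(time.Parse(layout, "2025-10-02 21:44:15.423935449 +0000 UTC"))   // watchObservedRunningTime

	e2e := running.Sub(created)          // 14.423935449s == podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 2.52430155s  == podStartSLOduration
	fmt.Println(e2e, slo)
}
```

Running this prints 14.423935449s and 2.52430155s, matching the logged values. For swift-proxy-6cfb778d8c-pmqb5 earlier, both pull timestamps were the zero time (the image was already present), so the SLO and E2E durations coincide at 3.117731128s.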
Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.593453 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/987f22a6-6842-4d5c-ac32-ef7698f66ed1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.593603 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxw9k\" (UniqueName: \"kubernetes.io/projected/987f22a6-6842-4d5c-ac32-ef7698f66ed1-kube-api-access-vxw9k\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.615000 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c24327e-435e-47c7-9547-904524571570" path="/var/lib/kubelet/pods/9c24327e-435e-47c7-9547-904524571570/volumes" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695244 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-config-data\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695294 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695329 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-config-data-custom\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695374 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987f22a6-6842-4d5c-ac32-ef7698f66ed1-logs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695390 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695421 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-scripts\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695469 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695491 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/987f22a6-6842-4d5c-ac32-ef7698f66ed1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.695509 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxw9k\" (UniqueName: \"kubernetes.io/projected/987f22a6-6842-4d5c-ac32-ef7698f66ed1-kube-api-access-vxw9k\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.696233 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/987f22a6-6842-4d5c-ac32-ef7698f66ed1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.696324 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987f22a6-6842-4d5c-ac32-ef7698f66ed1-logs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.701641 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.702434 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-config-data\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.704383 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-config-data-custom\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.704648 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-scripts\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.704818 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.705531 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/987f22a6-6842-4d5c-ac32-ef7698f66ed1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.711600 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-vxw9k\" (UniqueName: \"kubernetes.io/projected/987f22a6-6842-4d5c-ac32-ef7698f66ed1-kube-api-access-vxw9k\") pod \"cinder-api-0\" (UID: \"987f22a6-6842-4d5c-ac32-ef7698f66ed1\") " pod="openstack/cinder-api-0" Oct 02 21:44:15 crc kubenswrapper[4636]: I1002 21:44:15.875933 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.460942 4636 generic.go:334] "Generic (PLEG): container finished" podID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerID="c64f7325650e1e35ef9db022db186455a1a89fe816221d77d33d96f459913c65" exitCode=0 Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461284 4636 generic.go:334] "Generic (PLEG): container finished" podID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerID="e183b3cc16f73e179843fcd9515a558008d7684e1f414df7747234409e6179e4" exitCode=2 Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461295 4636 generic.go:334] "Generic (PLEG): container finished" podID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerID="c51734d7e55345c5aa2fbfa7460fdbae42f6f30967460a58671d4d3575407088" exitCode=0 Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461303 4636 generic.go:334] "Generic (PLEG): container finished" podID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerID="79a86e16c0980c6041172ed51c640852c921f0897e8b5d8a6fe3a6b0b50187ed" exitCode=0 Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461045 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerDied","Data":"c64f7325650e1e35ef9db022db186455a1a89fe816221d77d33d96f459913c65"} Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461379 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerDied","Data":"e183b3cc16f73e179843fcd9515a558008d7684e1f414df7747234409e6179e4"} Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461396 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerDied","Data":"c51734d7e55345c5aa2fbfa7460fdbae42f6f30967460a58671d4d3575407088"} Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.461408 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerDied","Data":"79a86e16c0980c6041172ed51c640852c921f0897e8b5d8a6fe3a6b0b50187ed"} Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.567520 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.816732 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.949886 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-sg-core-conf-yaml\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.949951 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-run-httpd\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.949995 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-scripts\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.950032 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-config-data\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.950070 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hwdk\" (UniqueName: \"kubernetes.io/projected/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-kube-api-access-6hwdk\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.950145 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-log-httpd\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.950175 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-combined-ca-bundle\") pod \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\" (UID: \"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf\") " Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.955303 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.956287 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.968765 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-scripts" (OuterVolumeSpecName: "scripts") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:16 crc kubenswrapper[4636]: I1002 21:44:16.968831 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-kube-api-access-6hwdk" (OuterVolumeSpecName: "kube-api-access-6hwdk") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "kube-api-access-6hwdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.011223 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.061555 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.061582 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hwdk\" (UniqueName: \"kubernetes.io/projected/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-kube-api-access-6hwdk\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.061591 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.061601 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.061609 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.088552 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.163981 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.194986 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-config-data" (OuterVolumeSpecName: "config-data") pod "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" (UID: "7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.265818 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.477948 4636 generic.go:334] "Generic (PLEG): container finished" podID="6e944a52-400a-4253-afad-9c8ab5abd662" containerID="650af3b5000135c170b1d7f4acdfba6ab8650ff85e59e1fef2a428b5847f6329" exitCode=0 Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.478020 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75d644d86d-xqtl4" event={"ID":"6e944a52-400a-4253-afad-9c8ab5abd662","Type":"ContainerDied","Data":"650af3b5000135c170b1d7f4acdfba6ab8650ff85e59e1fef2a428b5847f6329"} Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.478046 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75d644d86d-xqtl4" event={"ID":"6e944a52-400a-4253-afad-9c8ab5abd662","Type":"ContainerDied","Data":"bc3725dcae8ce3e907f78f883be9d5a2484420a926e8390aedb084ea6346424c"} Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.478056 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc3725dcae8ce3e907f78f883be9d5a2484420a926e8390aedb084ea6346424c" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.481523 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf","Type":"ContainerDied","Data":"96ce1c7f6f857087514487458d46997b64bc6c9ba1ddee7a7c0b78ee8dac2824"} Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.481571 4636 scope.go:117] "RemoveContainer" containerID="c64f7325650e1e35ef9db022db186455a1a89fe816221d77d33d96f459913c65" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.481706 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.497618 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"987f22a6-6842-4d5c-ac32-ef7698f66ed1","Type":"ContainerStarted","Data":"131f7fce92c936280b0356dc5f83dc80f5270bc209c588abac9712a947754ee7"} Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.499566 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.518360 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.538815 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.539091 4636 scope.go:117] "RemoveContainer" containerID="e183b3cc16f73e179843fcd9515a558008d7684e1f414df7747234409e6179e4" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.574359 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-combined-ca-bundle\") pod \"6e944a52-400a-4253-afad-9c8ab5abd662\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.574434 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nnrc\" (UniqueName: \"kubernetes.io/projected/6e944a52-400a-4253-afad-9c8ab5abd662-kube-api-access-9nnrc\") pod \"6e944a52-400a-4253-afad-9c8ab5abd662\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.574502 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-ovndb-tls-certs\") pod \"6e944a52-400a-4253-afad-9c8ab5abd662\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.574518 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-config\") pod \"6e944a52-400a-4253-afad-9c8ab5abd662\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.574547 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-httpd-config\") pod \"6e944a52-400a-4253-afad-9c8ab5abd662\" (UID: \"6e944a52-400a-4253-afad-9c8ab5abd662\") " Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.579482 4636 scope.go:117] "RemoveContainer" containerID="c51734d7e55345c5aa2fbfa7460fdbae42f6f30967460a58671d4d3575407088" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.580172 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e944a52-400a-4253-afad-9c8ab5abd662-kube-api-access-9nnrc" (OuterVolumeSpecName: "kube-api-access-9nnrc") pod "6e944a52-400a-4253-afad-9c8ab5abd662" (UID: "6e944a52-400a-4253-afad-9c8ab5abd662"). InnerVolumeSpecName "kube-api-access-9nnrc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592268 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:17 crc kubenswrapper[4636]: E1002 21:44:17.592607 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-central-agent" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592622 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-central-agent" Oct 02 21:44:17 crc kubenswrapper[4636]: E1002 21:44:17.592641 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-api" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592648 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-api" Oct 02 21:44:17 crc kubenswrapper[4636]: E1002 21:44:17.592657 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-httpd" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592663 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-httpd" Oct 02 21:44:17 crc kubenswrapper[4636]: E1002 21:44:17.592677 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="proxy-httpd" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592684 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="proxy-httpd" Oct 02 21:44:17 crc kubenswrapper[4636]: E1002 21:44:17.592722 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-notification-agent" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592729 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-notification-agent" Oct 02 21:44:17 crc kubenswrapper[4636]: E1002 21:44:17.592739 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="sg-core" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592758 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="sg-core" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592916 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="proxy-httpd" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592930 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-central-agent" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592939 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="sg-core" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592950 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-httpd" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592959 4636 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6e944a52-400a-4253-afad-9c8ab5abd662" containerName="neutron-api" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.592970 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" containerName="ceilometer-notification-agent" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.613196 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6e944a52-400a-4253-afad-9c8ab5abd662" (UID: "6e944a52-400a-4253-afad-9c8ab5abd662"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.629785 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.631132 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.636684 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.636878 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.641129 4636 scope.go:117] "RemoveContainer" containerID="79a86e16c0980c6041172ed51c640852c921f0897e8b5d8a6fe3a6b0b50187ed" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.653956 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf" path="/var/lib/kubelet/pods/7f3cb7c5-4859-47ad-80aa-6ed1208ab6cf/volumes" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.680877 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.680932 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk4kk\" (UniqueName: \"kubernetes.io/projected/5475f032-4d39-41bd-a428-cbe0e0b6463a-kube-api-access-hk4kk\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681011 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-config-data\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681045 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-run-httpd\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681094 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-scripts\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681114 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-log-httpd\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681162 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681216 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nnrc\" (UniqueName: \"kubernetes.io/projected/6e944a52-400a-4253-afad-9c8ab5abd662-kube-api-access-9nnrc\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.681227 4636 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.700057 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e944a52-400a-4253-afad-9c8ab5abd662" (UID: "6e944a52-400a-4253-afad-9c8ab5abd662"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.706010 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6e944a52-400a-4253-afad-9c8ab5abd662" (UID: "6e944a52-400a-4253-afad-9c8ab5abd662"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.717303 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-config" (OuterVolumeSpecName: "config") pod "6e944a52-400a-4253-afad-9c8ab5abd662" (UID: "6e944a52-400a-4253-afad-9c8ab5abd662"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784385 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-config-data\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784432 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-run-httpd\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784472 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-scripts\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784488 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-log-httpd\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784523 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784623 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784652 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk4kk\" (UniqueName: \"kubernetes.io/projected/5475f032-4d39-41bd-a428-cbe0e0b6463a-kube-api-access-hk4kk\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784696 4636 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784706 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.784716 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e944a52-400a-4253-afad-9c8ab5abd662-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.789833 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-run-httpd\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.791050 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-log-httpd\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.791372 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.791845 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-config-data\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.793230 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-scripts\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.794295 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.802955 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk4kk\" (UniqueName: \"kubernetes.io/projected/5475f032-4d39-41bd-a428-cbe0e0b6463a-kube-api-access-hk4kk\") pod \"ceilometer-0\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " pod="openstack/ceilometer-0" Oct 02 21:44:17 crc kubenswrapper[4636]: I1002 21:44:17.983030 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.476106 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.476582 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.477425 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"0d50d981993a8c7a14f5be874fc82c71a331452cefb9991b2bc42383c51602c1"} pod="openstack/horizon-7646d88f4d-85mgl" containerMessage="Container horizon failed startup probe, will be restarted" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.477464 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" containerID="cri-o://0d50d981993a8c7a14f5be874fc82c71a331452cefb9991b2bc42383c51602c1" gracePeriod=30 Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.506843 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-75d644d86d-xqtl4" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.506870 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"987f22a6-6842-4d5c-ac32-ef7698f66ed1","Type":"ContainerStarted","Data":"1e521d16b3a40308a93544a14dd61bad48628978988d3d8e66f020c88a4b95c8"} Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.506909 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"987f22a6-6842-4d5c-ac32-ef7698f66ed1","Type":"ContainerStarted","Data":"00fb2d708191def6d72ef1b0d0327f4fcf850046a1e4a60a00f56fe83149b5a9"} Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.507043 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.532207 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.5321937869999998 podStartE2EDuration="3.532193787s" podCreationTimestamp="2025-10-02 21:44:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:44:18.52547487 +0000 UTC m=+1249.848482889" watchObservedRunningTime="2025-10-02 21:44:18.532193787 +0000 UTC m=+1249.855201806" Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.547882 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-75d644d86d-xqtl4"] Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.555997 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-75d644d86d-xqtl4"] Oct 02 21:44:18 crc kubenswrapper[4636]: I1002 21:44:18.573006 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:19 crc kubenswrapper[4636]: I1002 21:44:19.517661 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerStarted","Data":"c564cb8d4ac193d877dd4bfddfa32267b62c1a53fbaaa1087a75529d09c74822"} Oct 02 21:44:19 crc kubenswrapper[4636]: I1002 21:44:19.518115 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerStarted","Data":"4fdbc6f38ccdcf642a26d990a61b448ead1b4ad581d1170b4cfd3185af5e187d"} Oct 02 21:44:19 crc kubenswrapper[4636]: I1002 21:44:19.614507 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e944a52-400a-4253-afad-9c8ab5abd662" path="/var/lib/kubelet/pods/6e944a52-400a-4253-afad-9c8ab5abd662/volumes" Oct 02 21:44:19 crc kubenswrapper[4636]: I1002 21:44:19.618489 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:20 crc kubenswrapper[4636]: I1002 21:44:20.526841 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerStarted","Data":"53ef157e750c2438e21204ab33427c71011a8a6495d6b2f70e5295c920d0441b"} Oct 02 21:44:21 crc kubenswrapper[4636]: I1002 21:44:21.537028 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerStarted","Data":"7dc600804edd8d87ed7d9a6c1867642643fbc0f7ed05686c535728ecfb282f1b"} Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.547909 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerStarted","Data":"93a1499bd29d0189f1d3b8d1f4861f1e450f4b555fbdbe67a5a5bc23cb5acd2e"} Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.548064 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-central-agent" containerID="cri-o://c564cb8d4ac193d877dd4bfddfa32267b62c1a53fbaaa1087a75529d09c74822" gracePeriod=30 Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.548473 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.548495 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="proxy-httpd" containerID="cri-o://93a1499bd29d0189f1d3b8d1f4861f1e450f4b555fbdbe67a5a5bc23cb5acd2e" gracePeriod=30 Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.548516 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="sg-core" containerID="cri-o://7dc600804edd8d87ed7d9a6c1867642643fbc0f7ed05686c535728ecfb282f1b" gracePeriod=30 Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.548540 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-notification-agent" containerID="cri-o://53ef157e750c2438e21204ab33427c71011a8a6495d6b2f70e5295c920d0441b" gracePeriod=30 Oct 02 21:44:22 crc kubenswrapper[4636]: I1002 21:44:22.569869 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9410038539999999 podStartE2EDuration="5.569852242s" 
podCreationTimestamp="2025-10-02 21:44:17 +0000 UTC" firstStartedPulling="2025-10-02 21:44:18.568864873 +0000 UTC m=+1249.891872892" lastFinishedPulling="2025-10-02 21:44:22.197713261 +0000 UTC m=+1253.520721280" observedRunningTime="2025-10-02 21:44:22.567605703 +0000 UTC m=+1253.890613742" watchObservedRunningTime="2025-10-02 21:44:22.569852242 +0000 UTC m=+1253.892860261" Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.117197 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.117255 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.559841 4636 generic.go:334] "Generic (PLEG): container finished" podID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerID="93a1499bd29d0189f1d3b8d1f4861f1e450f4b555fbdbe67a5a5bc23cb5acd2e" exitCode=0 Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.559871 4636 generic.go:334] "Generic (PLEG): container finished" podID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerID="7dc600804edd8d87ed7d9a6c1867642643fbc0f7ed05686c535728ecfb282f1b" exitCode=2 Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.559883 4636 generic.go:334] "Generic (PLEG): container finished" podID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerID="53ef157e750c2438e21204ab33427c71011a8a6495d6b2f70e5295c920d0441b" exitCode=0 Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.559903 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerDied","Data":"93a1499bd29d0189f1d3b8d1f4861f1e450f4b555fbdbe67a5a5bc23cb5acd2e"} Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.559932 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerDied","Data":"7dc600804edd8d87ed7d9a6c1867642643fbc0f7ed05686c535728ecfb282f1b"} Oct 02 21:44:23 crc kubenswrapper[4636]: I1002 21:44:23.559944 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerDied","Data":"53ef157e750c2438e21204ab33427c71011a8a6495d6b2f70e5295c920d0441b"} Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.582229 4636 generic.go:334] "Generic (PLEG): container finished" podID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerID="c564cb8d4ac193d877dd4bfddfa32267b62c1a53fbaaa1087a75529d09c74822" exitCode=0 Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.582362 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerDied","Data":"c564cb8d4ac193d877dd4bfddfa32267b62c1a53fbaaa1087a75529d09c74822"} Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.666087 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738412 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-sg-core-conf-yaml\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738484 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hk4kk\" (UniqueName: \"kubernetes.io/projected/5475f032-4d39-41bd-a428-cbe0e0b6463a-kube-api-access-hk4kk\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738516 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-combined-ca-bundle\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738562 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-scripts\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738586 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-log-httpd\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738680 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-run-httpd\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.738712 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-config-data\") pod \"5475f032-4d39-41bd-a428-cbe0e0b6463a\" (UID: \"5475f032-4d39-41bd-a428-cbe0e0b6463a\") " Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.739501 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.739536 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.744631 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5475f032-4d39-41bd-a428-cbe0e0b6463a-kube-api-access-hk4kk" (OuterVolumeSpecName: "kube-api-access-hk4kk") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "kube-api-access-hk4kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.753281 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-scripts" (OuterVolumeSpecName: "scripts") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.773028 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.841270 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.841302 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hk4kk\" (UniqueName: \"kubernetes.io/projected/5475f032-4d39-41bd-a428-cbe0e0b6463a-kube-api-access-hk4kk\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.841314 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.841323 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.841332 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5475f032-4d39-41bd-a428-cbe0e0b6463a-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.846456 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.869023 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-config-data" (OuterVolumeSpecName: "config-data") pod "5475f032-4d39-41bd-a428-cbe0e0b6463a" (UID: "5475f032-4d39-41bd-a428-cbe0e0b6463a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.976267 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:25 crc kubenswrapper[4636]: I1002 21:44:25.976304 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5475f032-4d39-41bd-a428-cbe0e0b6463a-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.592710 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5475f032-4d39-41bd-a428-cbe0e0b6463a","Type":"ContainerDied","Data":"4fdbc6f38ccdcf642a26d990a61b448ead1b4ad581d1170b4cfd3185af5e187d"} Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.593001 4636 scope.go:117] "RemoveContainer" containerID="93a1499bd29d0189f1d3b8d1f4861f1e450f4b555fbdbe67a5a5bc23cb5acd2e" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.593136 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.631042 4636 scope.go:117] "RemoveContainer" containerID="7dc600804edd8d87ed7d9a6c1867642643fbc0f7ed05686c535728ecfb282f1b" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.631846 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.655886 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.674106 4636 scope.go:117] "RemoveContainer" containerID="53ef157e750c2438e21204ab33427c71011a8a6495d6b2f70e5295c920d0441b" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.687782 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:26 crc kubenswrapper[4636]: E1002 21:44:26.688951 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-notification-agent" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.688976 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-notification-agent" Oct 02 21:44:26 crc kubenswrapper[4636]: E1002 21:44:26.689005 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="sg-core" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689011 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="sg-core" Oct 02 21:44:26 crc kubenswrapper[4636]: E1002 21:44:26.689032 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="proxy-httpd" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689037 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="proxy-httpd" Oct 02 21:44:26 crc kubenswrapper[4636]: E1002 21:44:26.689056 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-central-agent" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689064 4636 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-central-agent" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689392 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-notification-agent" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689416 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="ceilometer-central-agent" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689434 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="proxy-httpd" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.689448 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" containerName="sg-core" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.692651 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.704629 4636 scope.go:117] "RemoveContainer" containerID="c564cb8d4ac193d877dd4bfddfa32267b62c1a53fbaaa1087a75529d09c74822" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.706852 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.707112 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.729123 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793417 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793459 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793583 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-run-httpd\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793607 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-log-httpd\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793638 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-config-data\") pod \"ceilometer-0\" (UID: 
\"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793654 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-scripts\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.793680 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cjq4\" (UniqueName: \"kubernetes.io/projected/02518a58-8609-4307-a40f-b5f4c752a20b-kube-api-access-8cjq4\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898017 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-run-httpd\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898075 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-log-httpd\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898111 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-config-data\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898148 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-scripts\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898177 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cjq4\" (UniqueName: \"kubernetes.io/projected/02518a58-8609-4307-a40f-b5f4c752a20b-kube-api-access-8cjq4\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898225 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.898240 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.905588 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-scripts\") pod \"ceilometer-0\" 
(UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.905997 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-run-httpd\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.906150 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-log-httpd\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.913605 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.913926 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.946727 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-config-data\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:26 crc kubenswrapper[4636]: I1002 21:44:26.977263 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cjq4\" (UniqueName: \"kubernetes.io/projected/02518a58-8609-4307-a40f-b5f4c752a20b-kube-api-access-8cjq4\") pod \"ceilometer-0\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " pod="openstack/ceilometer-0" Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.046786 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.257798 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.258024 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-log" containerID="cri-o://348097f1c42464400e4a64922a142283693b09203a5145ea056b31d48fa9dcbc" gracePeriod=30 Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.258415 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-httpd" containerID="cri-o://ddbd9b71f0e9550a9082a3b68f16b1b95edcc5aa1b2c1a592734dcf201ce2d96" gracePeriod=30 Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.601404 4636 generic.go:334] "Generic (PLEG): container finished" podID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerID="348097f1c42464400e4a64922a142283693b09203a5145ea056b31d48fa9dcbc" exitCode=143 Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.601573 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5e40a0d0-1f71-4064-924c-df6addeee8b1","Type":"ContainerDied","Data":"348097f1c42464400e4a64922a142283693b09203a5145ea056b31d48fa9dcbc"} Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.613476 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5475f032-4d39-41bd-a428-cbe0e0b6463a" path="/var/lib/kubelet/pods/5475f032-4d39-41bd-a428-cbe0e0b6463a/volumes" Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.692204 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.888146 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-vbvdn"] Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.889322 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.894926 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vbvdn"] Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.963466 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-q9x4w"] Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.964597 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-q9x4w" Oct 02 21:44:27 crc kubenswrapper[4636]: I1002 21:44:27.979951 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-q9x4w"] Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.022785 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xq6f\" (UniqueName: \"kubernetes.io/projected/01304c15-3331-4085-8769-2620145a308a-kube-api-access-9xq6f\") pod \"nova-cell0-db-create-q9x4w\" (UID: \"01304c15-3331-4085-8769-2620145a308a\") " pod="openstack/nova-cell0-db-create-q9x4w" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.022863 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppnsh\" (UniqueName: \"kubernetes.io/projected/68852986-2dd4-43e8-ac5d-acbe811855ca-kube-api-access-ppnsh\") pod \"nova-api-db-create-vbvdn\" (UID: \"68852986-2dd4-43e8-ac5d-acbe811855ca\") " pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.065381 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-4zfc2"] Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.066771 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.075010 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-4zfc2"] Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.124756 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppnsh\" (UniqueName: \"kubernetes.io/projected/68852986-2dd4-43e8-ac5d-acbe811855ca-kube-api-access-ppnsh\") pod \"nova-api-db-create-vbvdn\" (UID: \"68852986-2dd4-43e8-ac5d-acbe811855ca\") " pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.124840 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrlh5\" (UniqueName: \"kubernetes.io/projected/b8e71d83-78a0-4b7f-b759-c36325da0561-kube-api-access-qrlh5\") pod \"nova-cell1-db-create-4zfc2\" (UID: \"b8e71d83-78a0-4b7f-b759-c36325da0561\") " pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.124971 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xq6f\" (UniqueName: \"kubernetes.io/projected/01304c15-3331-4085-8769-2620145a308a-kube-api-access-9xq6f\") pod \"nova-cell0-db-create-q9x4w\" (UID: \"01304c15-3331-4085-8769-2620145a308a\") " pod="openstack/nova-cell0-db-create-q9x4w" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.152779 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppnsh\" (UniqueName: \"kubernetes.io/projected/68852986-2dd4-43e8-ac5d-acbe811855ca-kube-api-access-ppnsh\") pod \"nova-api-db-create-vbvdn\" (UID: \"68852986-2dd4-43e8-ac5d-acbe811855ca\") " pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.152829 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xq6f\" (UniqueName: \"kubernetes.io/projected/01304c15-3331-4085-8769-2620145a308a-kube-api-access-9xq6f\") pod \"nova-cell0-db-create-q9x4w\" (UID: \"01304c15-3331-4085-8769-2620145a308a\") " pod="openstack/nova-cell0-db-create-q9x4w" 
Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.226724 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrlh5\" (UniqueName: \"kubernetes.io/projected/b8e71d83-78a0-4b7f-b759-c36325da0561-kube-api-access-qrlh5\") pod \"nova-cell1-db-create-4zfc2\" (UID: \"b8e71d83-78a0-4b7f-b759-c36325da0561\") " pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.233122 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.243408 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrlh5\" (UniqueName: \"kubernetes.io/projected/b8e71d83-78a0-4b7f-b759-c36325da0561-kube-api-access-qrlh5\") pod \"nova-cell1-db-create-4zfc2\" (UID: \"b8e71d83-78a0-4b7f-b759-c36325da0561\") " pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.284494 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q9x4w" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.402185 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.625922 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerStarted","Data":"9a48328728f502468b54ef65ff007744cc27809dd4272837f6738b9d2fa8c7df"} Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.859907 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vbvdn"] Oct 02 21:44:28 crc kubenswrapper[4636]: I1002 21:44:28.993818 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-4zfc2"] Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.000728 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-q9x4w"] Oct 02 21:44:29 crc kubenswrapper[4636]: W1002 21:44:29.008990 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01304c15_3331_4085_8769_2620145a308a.slice/crio-7c4d55eca6c95c7a89e33c5a603f0ad0476205d40f8dfa4aea331c42f6415fff WatchSource:0}: Error finding container 7c4d55eca6c95c7a89e33c5a603f0ad0476205d40f8dfa4aea331c42f6415fff: Status 404 returned error can't find the container with id 7c4d55eca6c95c7a89e33c5a603f0ad0476205d40f8dfa4aea331c42f6415fff Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.213306 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.636129 4636 generic.go:334] "Generic (PLEG): container finished" podID="01304c15-3331-4085-8769-2620145a308a" containerID="b07381b2d10c632073e6d760a8bbc5af2236f21bfe24d32ee6f38a6a7e162cf5" exitCode=0 Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.637142 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q9x4w" event={"ID":"01304c15-3331-4085-8769-2620145a308a","Type":"ContainerDied","Data":"b07381b2d10c632073e6d760a8bbc5af2236f21bfe24d32ee6f38a6a7e162cf5"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.637247 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q9x4w" 
event={"ID":"01304c15-3331-4085-8769-2620145a308a","Type":"ContainerStarted","Data":"7c4d55eca6c95c7a89e33c5a603f0ad0476205d40f8dfa4aea331c42f6415fff"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.645053 4636 generic.go:334] "Generic (PLEG): container finished" podID="68852986-2dd4-43e8-ac5d-acbe811855ca" containerID="9751da6246c8ee3ec6139a9c94ad9e577e88874131e36a9beee5a43fdc433ab8" exitCode=0 Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.645241 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vbvdn" event={"ID":"68852986-2dd4-43e8-ac5d-acbe811855ca","Type":"ContainerDied","Data":"9751da6246c8ee3ec6139a9c94ad9e577e88874131e36a9beee5a43fdc433ab8"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.645358 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vbvdn" event={"ID":"68852986-2dd4-43e8-ac5d-acbe811855ca","Type":"ContainerStarted","Data":"902492980a170829cd7a566c217d16cfa9551ea93b24e20f5f91701b14fb4e3d"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.648302 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerStarted","Data":"a107f74dd482df6555763c13d34a835b753956f8884f0a352f4dd3cb79637c8d"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.648344 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerStarted","Data":"19d63bc424331622c2b10912d4d245c03857f79e8143272c52ea941e13b08d54"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.650145 4636 generic.go:334] "Generic (PLEG): container finished" podID="b8e71d83-78a0-4b7f-b759-c36325da0561" containerID="92aac92d398855a09f27798c9cd67a0d12a0b8806635b3c842184ee1eee83016" exitCode=0 Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.650294 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4zfc2" event={"ID":"b8e71d83-78a0-4b7f-b759-c36325da0561","Type":"ContainerDied","Data":"92aac92d398855a09f27798c9cd67a0d12a0b8806635b3c842184ee1eee83016"} Oct 02 21:44:29 crc kubenswrapper[4636]: I1002 21:44:29.650498 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4zfc2" event={"ID":"b8e71d83-78a0-4b7f-b759-c36325da0561","Type":"ContainerStarted","Data":"4be0203a8da06945a62d613ff3328ef780cd08f94f11caba32ddeee0e1ed607b"} Oct 02 21:44:30 crc kubenswrapper[4636]: I1002 21:44:30.571290 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:44:30 crc kubenswrapper[4636]: I1002 21:44:30.572070 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-log" containerID="cri-o://6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d" gracePeriod=30 Oct 02 21:44:30 crc kubenswrapper[4636]: I1002 21:44:30.575735 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-httpd" containerID="cri-o://2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5" gracePeriod=30 Oct 02 21:44:30 crc kubenswrapper[4636]: I1002 21:44:30.663429 4636 generic.go:334] "Generic (PLEG): container finished" podID="5e40a0d0-1f71-4064-924c-df6addeee8b1" 
containerID="ddbd9b71f0e9550a9082a3b68f16b1b95edcc5aa1b2c1a592734dcf201ce2d96" exitCode=0 Oct 02 21:44:30 crc kubenswrapper[4636]: I1002 21:44:30.664048 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5e40a0d0-1f71-4064-924c-df6addeee8b1","Type":"ContainerDied","Data":"ddbd9b71f0e9550a9082a3b68f16b1b95edcc5aa1b2c1a592734dcf201ce2d96"} Oct 02 21:44:30 crc kubenswrapper[4636]: I1002 21:44:30.673562 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerStarted","Data":"a9e60c3abc2d3b2585a1e3e9413487d89625505cee4b8faefea84bc21294623e"} Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.001573 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.138683 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppnsh\" (UniqueName: \"kubernetes.io/projected/68852986-2dd4-43e8-ac5d-acbe811855ca-kube-api-access-ppnsh\") pod \"68852986-2dd4-43e8-ac5d-acbe811855ca\" (UID: \"68852986-2dd4-43e8-ac5d-acbe811855ca\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.144791 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68852986-2dd4-43e8-ac5d-acbe811855ca-kube-api-access-ppnsh" (OuterVolumeSpecName: "kube-api-access-ppnsh") pod "68852986-2dd4-43e8-ac5d-acbe811855ca" (UID: "68852986-2dd4-43e8-ac5d-acbe811855ca"). InnerVolumeSpecName "kube-api-access-ppnsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.160282 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.243712 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrlh5\" (UniqueName: \"kubernetes.io/projected/b8e71d83-78a0-4b7f-b759-c36325da0561-kube-api-access-qrlh5\") pod \"b8e71d83-78a0-4b7f-b759-c36325da0561\" (UID: \"b8e71d83-78a0-4b7f-b759-c36325da0561\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.244102 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppnsh\" (UniqueName: \"kubernetes.io/projected/68852986-2dd4-43e8-ac5d-acbe811855ca-kube-api-access-ppnsh\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.256656 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e71d83-78a0-4b7f-b759-c36325da0561-kube-api-access-qrlh5" (OuterVolumeSpecName: "kube-api-access-qrlh5") pod "b8e71d83-78a0-4b7f-b759-c36325da0561" (UID: "b8e71d83-78a0-4b7f-b759-c36325da0561"). InnerVolumeSpecName "kube-api-access-qrlh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.348822 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrlh5\" (UniqueName: \"kubernetes.io/projected/b8e71d83-78a0-4b7f-b759-c36325da0561-kube-api-access-qrlh5\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.416628 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-q9x4w" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.422430 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552218 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-scripts\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552261 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-combined-ca-bundle\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552313 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn8qr\" (UniqueName: \"kubernetes.io/projected/5e40a0d0-1f71-4064-924c-df6addeee8b1-kube-api-access-hn8qr\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552401 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-config-data\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552449 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-httpd-run\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552492 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xq6f\" (UniqueName: \"kubernetes.io/projected/01304c15-3331-4085-8769-2620145a308a-kube-api-access-9xq6f\") pod \"01304c15-3331-4085-8769-2620145a308a\" (UID: \"01304c15-3331-4085-8769-2620145a308a\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552524 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552600 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-logs\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.552617 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-public-tls-certs\") pod \"5e40a0d0-1f71-4064-924c-df6addeee8b1\" (UID: \"5e40a0d0-1f71-4064-924c-df6addeee8b1\") " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.553038 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.553134 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-logs" (OuterVolumeSpecName: "logs") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.557979 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01304c15-3331-4085-8769-2620145a308a-kube-api-access-9xq6f" (OuterVolumeSpecName: "kube-api-access-9xq6f") pod "01304c15-3331-4085-8769-2620145a308a" (UID: "01304c15-3331-4085-8769-2620145a308a"). InnerVolumeSpecName "kube-api-access-9xq6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.559028 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.571873 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-scripts" (OuterVolumeSpecName: "scripts") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.583176 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e40a0d0-1f71-4064-924c-df6addeee8b1-kube-api-access-hn8qr" (OuterVolumeSpecName: "kube-api-access-hn8qr") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "kube-api-access-hn8qr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.654945 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn8qr\" (UniqueName: \"kubernetes.io/projected/5e40a0d0-1f71-4064-924c-df6addeee8b1-kube-api-access-hn8qr\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.654982 4636 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.655005 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xq6f\" (UniqueName: \"kubernetes.io/projected/01304c15-3331-4085-8769-2620145a308a-kube-api-access-9xq6f\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.655028 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.655038 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e40a0d0-1f71-4064-924c-df6addeee8b1-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.655046 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.664828 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-config-data" (OuterVolumeSpecName: "config-data") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.674737 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.676531 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.684605 4636 generic.go:334] "Generic (PLEG): container finished" podID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerID="6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d" exitCode=143 Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.687943 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4zfc2" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.693177 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q9x4w" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.704788 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.712893 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e40a0d0-1f71-4064-924c-df6addeee8b1" (UID: "5e40a0d0-1f71-4064-924c-df6addeee8b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.713384 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vbvdn" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755262 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c40074e6-e952-4d2f-b14d-2860b9eba108","Type":"ContainerDied","Data":"6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d"} Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755304 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4zfc2" event={"ID":"b8e71d83-78a0-4b7f-b759-c36325da0561","Type":"ContainerDied","Data":"4be0203a8da06945a62d613ff3328ef780cd08f94f11caba32ddeee0e1ed607b"} Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755320 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4be0203a8da06945a62d613ff3328ef780cd08f94f11caba32ddeee0e1ed607b" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755331 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q9x4w" event={"ID":"01304c15-3331-4085-8769-2620145a308a","Type":"ContainerDied","Data":"7c4d55eca6c95c7a89e33c5a603f0ad0476205d40f8dfa4aea331c42f6415fff"} Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755341 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c4d55eca6c95c7a89e33c5a603f0ad0476205d40f8dfa4aea331c42f6415fff" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755348 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5e40a0d0-1f71-4064-924c-df6addeee8b1","Type":"ContainerDied","Data":"09e0902ed5559e590cb236400c26fcbb1b66815e41d222099e5191a4e2c0be64"} Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755362 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vbvdn" event={"ID":"68852986-2dd4-43e8-ac5d-acbe811855ca","Type":"ContainerDied","Data":"902492980a170829cd7a566c217d16cfa9551ea93b24e20f5f91701b14fb4e3d"} Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755371 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="902492980a170829cd7a566c217d16cfa9551ea93b24e20f5f91701b14fb4e3d" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.755389 4636 scope.go:117] "RemoveContainer" containerID="ddbd9b71f0e9550a9082a3b68f16b1b95edcc5aa1b2c1a592734dcf201ce2d96" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.756221 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.756260 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.756269 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.756278 4636 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e40a0d0-1f71-4064-924c-df6addeee8b1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:31 crc kubenswrapper[4636]: I1002 21:44:31.826330 4636 scope.go:117] "RemoveContainer" containerID="348097f1c42464400e4a64922a142283693b09203a5145ea056b31d48fa9dcbc" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.045594 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.055655 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.091794 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:44:32 crc kubenswrapper[4636]: E1002 21:44:32.092472 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-log" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092493 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-log" Oct 02 21:44:32 crc kubenswrapper[4636]: E1002 21:44:32.092504 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e71d83-78a0-4b7f-b759-c36325da0561" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092510 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e71d83-78a0-4b7f-b759-c36325da0561" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: E1002 21:44:32.092538 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68852986-2dd4-43e8-ac5d-acbe811855ca" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092544 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="68852986-2dd4-43e8-ac5d-acbe811855ca" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: E1002 21:44:32.092553 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-httpd" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092559 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-httpd" Oct 02 21:44:32 crc kubenswrapper[4636]: E1002 21:44:32.092572 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01304c15-3331-4085-8769-2620145a308a" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092578 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="01304c15-3331-4085-8769-2620145a308a" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092766 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e71d83-78a0-4b7f-b759-c36325da0561" containerName="mariadb-database-create" Oct 02 21:44:32 crc 
kubenswrapper[4636]: I1002 21:44:32.092776 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="68852986-2dd4-43e8-ac5d-acbe811855ca" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092789 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-log" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092798 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" containerName="glance-httpd" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.092813 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="01304c15-3331-4085-8769-2620145a308a" containerName="mariadb-database-create" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.093677 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.095465 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.097024 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.108694 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.263569 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.263819 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-logs\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.263930 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.264026 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfw9c\" (UniqueName: \"kubernetes.io/projected/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-kube-api-access-rfw9c\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.264077 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-scripts\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 
21:44:32.264108 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.264196 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-config-data\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.264238 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366128 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-config-data\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366177 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366250 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366268 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-logs\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366299 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366330 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfw9c\" (UniqueName: \"kubernetes.io/projected/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-kube-api-access-rfw9c\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366348 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-scripts\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.366380 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.367276 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.367457 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.367544 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-logs\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.375108 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.375328 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.378662 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-scripts\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.379062 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-config-data\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.401418 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfw9c\" (UniqueName: 
\"kubernetes.io/projected/1f3dba52-50a6-4789-a799-4e24bbb6e5ab-kube-api-access-rfw9c\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.405081 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"1f3dba52-50a6-4789-a799-4e24bbb6e5ab\") " pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.708402 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.736785 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerStarted","Data":"d35436f4ae0c5f959e14eb102c9efdbf5b23e394b28625c501fabd282e48bef3"} Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.738151 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:44:32 crc kubenswrapper[4636]: I1002 21:44:32.767138 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.219626169 podStartE2EDuration="6.767116373s" podCreationTimestamp="2025-10-02 21:44:26 +0000 UTC" firstStartedPulling="2025-10-02 21:44:27.69002478 +0000 UTC m=+1259.013032799" lastFinishedPulling="2025-10-02 21:44:32.237514984 +0000 UTC m=+1263.560523003" observedRunningTime="2025-10-02 21:44:32.764198236 +0000 UTC m=+1264.087206255" watchObservedRunningTime="2025-10-02 21:44:32.767116373 +0000 UTC m=+1264.090124402" Oct 02 21:44:33 crc kubenswrapper[4636]: I1002 21:44:33.224850 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 02 21:44:33 crc kubenswrapper[4636]: W1002 21:44:33.229948 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f3dba52_50a6_4789_a799_4e24bbb6e5ab.slice/crio-24b887a9c136d82ec38e6b25245d8fa77615270189db56a40a76829f2b870b7a WatchSource:0}: Error finding container 24b887a9c136d82ec38e6b25245d8fa77615270189db56a40a76829f2b870b7a: Status 404 returned error can't find the container with id 24b887a9c136d82ec38e6b25245d8fa77615270189db56a40a76829f2b870b7a Oct 02 21:44:33 crc kubenswrapper[4636]: I1002 21:44:33.616627 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e40a0d0-1f71-4064-924c-df6addeee8b1" path="/var/lib/kubelet/pods/5e40a0d0-1f71-4064-924c-df6addeee8b1/volumes" Oct 02 21:44:33 crc kubenswrapper[4636]: I1002 21:44:33.751897 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1f3dba52-50a6-4789-a799-4e24bbb6e5ab","Type":"ContainerStarted","Data":"24b887a9c136d82ec38e6b25245d8fa77615270189db56a40a76829f2b870b7a"} Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.451031 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611266 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdsw2\" (UniqueName: \"kubernetes.io/projected/c40074e6-e952-4d2f-b14d-2860b9eba108-kube-api-access-pdsw2\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611337 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-scripts\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611381 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-config-data\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611463 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-httpd-run\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611519 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-combined-ca-bundle\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611565 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611594 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-internal-tls-certs\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611652 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-logs\") pod \"c40074e6-e952-4d2f-b14d-2860b9eba108\" (UID: \"c40074e6-e952-4d2f-b14d-2860b9eba108\") " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.611915 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.612032 4636 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.612222 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-logs" (OuterVolumeSpecName: "logs") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.620920 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-scripts" (OuterVolumeSpecName: "scripts") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.622019 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c40074e6-e952-4d2f-b14d-2860b9eba108-kube-api-access-pdsw2" (OuterVolumeSpecName: "kube-api-access-pdsw2") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "kube-api-access-pdsw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.645641 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.646901 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.693468 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.694048 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.713820 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.713848 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.713868 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.713877 4636 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.713887 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c40074e6-e952-4d2f-b14d-2860b9eba108-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.713895 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdsw2\" (UniqueName: \"kubernetes.io/projected/c40074e6-e952-4d2f-b14d-2860b9eba108-kube-api-access-pdsw2\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.720530 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-config-data" (OuterVolumeSpecName: "config-data") pod "c40074e6-e952-4d2f-b14d-2860b9eba108" (UID: "c40074e6-e952-4d2f-b14d-2860b9eba108"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.736061 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.766451 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1f3dba52-50a6-4789-a799-4e24bbb6e5ab","Type":"ContainerStarted","Data":"9a8934913b249726b9f5bd81d71ca6b6761a12a0f9d5044cd7bc16c8514c7a25"} Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.768884 4636 generic.go:334] "Generic (PLEG): container finished" podID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerID="2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5" exitCode=0 Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.769043 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c40074e6-e952-4d2f-b14d-2860b9eba108","Type":"ContainerDied","Data":"2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5"} Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.769114 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c40074e6-e952-4d2f-b14d-2860b9eba108","Type":"ContainerDied","Data":"a3dbac607a2b9678d603890d25fb01971466e938dbe041ccef81dea2020c6493"} Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.769139 4636 scope.go:117] "RemoveContainer" containerID="2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.769266 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.799106 4636 scope.go:117] "RemoveContainer" containerID="6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.814021 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.817915 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.817935 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c40074e6-e952-4d2f-b14d-2860b9eba108-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.842043 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.871892 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:44:34 crc kubenswrapper[4636]: E1002 21:44:34.872267 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-httpd" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.872280 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-httpd" Oct 02 21:44:34 crc kubenswrapper[4636]: E1002 21:44:34.872321 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-log" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.872329 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-log" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.872495 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-httpd" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.872516 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" containerName="glance-log" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.877963 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.880760 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.880954 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.897093 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.912891 4636 scope.go:117] "RemoveContainer" containerID="2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5" Oct 02 21:44:34 crc kubenswrapper[4636]: E1002 21:44:34.916159 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5\": container with ID starting with 2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5 not found: ID does not exist" containerID="2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.916191 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5"} err="failed to get container status \"2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5\": rpc error: code = NotFound desc = could not find container \"2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5\": container with ID starting with 2c5920a797bc0976b3b21672a197db5633d16bdf6e9ca93761736aabd3f249f5 not found: ID does not exist" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.916212 4636 scope.go:117] "RemoveContainer" containerID="6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d" Oct 02 21:44:34 crc kubenswrapper[4636]: E1002 21:44:34.920272 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d\": container with ID starting with 6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d not found: ID does not exist" containerID="6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d" Oct 02 21:44:34 crc kubenswrapper[4636]: I1002 21:44:34.920309 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d"} err="failed to get container status \"6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d\": rpc error: code = NotFound desc = could not find container \"6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d\": container with ID starting with 6adc2d444434cbc7f9177bf02f71a2115cbb557752aeedbbc8667c6f090bba2d not found: ID does not exist" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023129 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023532 4636 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023559 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023603 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbf8l\" (UniqueName: \"kubernetes.io/projected/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-kube-api-access-wbf8l\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023629 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023929 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.023987 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.024018 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125721 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125783 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc 
kubenswrapper[4636]: I1002 21:44:35.125812 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125894 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125919 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125934 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125965 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbf8l\" (UniqueName: \"kubernetes.io/projected/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-kube-api-access-wbf8l\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.125984 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.127424 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.127680 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.127760 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.131283 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.132846 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.134132 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.135159 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.165301 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.168845 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbf8l\" (UniqueName: \"kubernetes.io/projected/f2d7a9fb-d15d-4365-97bd-5355c4f6969f-kube-api-access-wbf8l\") pod \"glance-default-internal-api-0\" (UID: \"f2d7a9fb-d15d-4365-97bd-5355c4f6969f\") " pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.210187 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.617193 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c40074e6-e952-4d2f-b14d-2860b9eba108" path="/var/lib/kubelet/pods/c40074e6-e952-4d2f-b14d-2860b9eba108/volumes" Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.777568 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.794713 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1f3dba52-50a6-4789-a799-4e24bbb6e5ab","Type":"ContainerStarted","Data":"5e786609322def82654c5a61f74fcda17aa501de4538cca69a140495e2617c60"} Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.799581 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-central-agent" containerID="cri-o://a107f74dd482df6555763c13d34a835b753956f8884f0a352f4dd3cb79637c8d" gracePeriod=30 Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.799882 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="proxy-httpd" containerID="cri-o://d35436f4ae0c5f959e14eb102c9efdbf5b23e394b28625c501fabd282e48bef3" gracePeriod=30 Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.799955 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="sg-core" containerID="cri-o://a9e60c3abc2d3b2585a1e3e9413487d89625505cee4b8faefea84bc21294623e" gracePeriod=30 Oct 02 21:44:35 crc kubenswrapper[4636]: I1002 21:44:35.800011 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-notification-agent" containerID="cri-o://19d63bc424331622c2b10912d4d245c03857f79e8143272c52ea941e13b08d54" gracePeriod=30 Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.812984 4636 generic.go:334] "Generic (PLEG): container finished" podID="02518a58-8609-4307-a40f-b5f4c752a20b" containerID="d35436f4ae0c5f959e14eb102c9efdbf5b23e394b28625c501fabd282e48bef3" exitCode=0 Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.813396 4636 generic.go:334] "Generic (PLEG): container finished" podID="02518a58-8609-4307-a40f-b5f4c752a20b" containerID="a9e60c3abc2d3b2585a1e3e9413487d89625505cee4b8faefea84bc21294623e" exitCode=2 Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.813411 4636 generic.go:334] "Generic (PLEG): container finished" podID="02518a58-8609-4307-a40f-b5f4c752a20b" containerID="19d63bc424331622c2b10912d4d245c03857f79e8143272c52ea941e13b08d54" exitCode=0 Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.813017 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerDied","Data":"d35436f4ae0c5f959e14eb102c9efdbf5b23e394b28625c501fabd282e48bef3"} Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.813487 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerDied","Data":"a9e60c3abc2d3b2585a1e3e9413487d89625505cee4b8faefea84bc21294623e"} Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.813501 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerDied","Data":"19d63bc424331622c2b10912d4d245c03857f79e8143272c52ea941e13b08d54"} Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.816773 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d7a9fb-d15d-4365-97bd-5355c4f6969f","Type":"ContainerStarted","Data":"ef7689b9b03d20cd6ecc406bec4a4dd4a6d8a929ea6451c2a47730ce506f4eff"} Oct 02 21:44:36 crc kubenswrapper[4636]: I1002 21:44:36.816813 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d7a9fb-d15d-4365-97bd-5355c4f6969f","Type":"ContainerStarted","Data":"2d6db4dfc7248854481d030c7e6905f2f01f6cc94ff44ca19606048d4744b6df"} Oct 02 21:44:37 crc kubenswrapper[4636]: I1002 21:44:37.827431 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d7a9fb-d15d-4365-97bd-5355c4f6969f","Type":"ContainerStarted","Data":"5b6d249812e3fabbf75df53532f2efa03bf703e6753e3e6fd8112b5d39e44d9a"} Oct 02 21:44:37 crc kubenswrapper[4636]: I1002 21:44:37.846967 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.846947038 podStartE2EDuration="5.846947038s" podCreationTimestamp="2025-10-02 21:44:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:44:35.820589087 +0000 UTC m=+1267.143597126" watchObservedRunningTime="2025-10-02 21:44:37.846947038 +0000 UTC m=+1269.169955057" Oct 02 21:44:37 crc kubenswrapper[4636]: I1002 21:44:37.849446 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.849436583 podStartE2EDuration="3.849436583s" podCreationTimestamp="2025-10-02 21:44:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:44:37.845083159 +0000 UTC m=+1269.168091178" watchObservedRunningTime="2025-10-02 21:44:37.849436583 +0000 UTC m=+1269.172444592" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.239226 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-45bc-account-create-q5dkm"] Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.240796 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.246936 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.258351 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-45bc-account-create-q5dkm"] Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.280946 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg9f5\" (UniqueName: \"kubernetes.io/projected/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25-kube-api-access-cg9f5\") pod \"nova-api-45bc-account-create-q5dkm\" (UID: \"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25\") " pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.382979 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg9f5\" (UniqueName: \"kubernetes.io/projected/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25-kube-api-access-cg9f5\") pod \"nova-api-45bc-account-create-q5dkm\" (UID: \"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25\") " pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.407626 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg9f5\" (UniqueName: \"kubernetes.io/projected/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25-kube-api-access-cg9f5\") pod \"nova-api-45bc-account-create-q5dkm\" (UID: \"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25\") " pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.437113 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-1ba0-account-create-c6g94"] Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.438211 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.441705 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.501084 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvdvw\" (UniqueName: \"kubernetes.io/projected/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21-kube-api-access-lvdvw\") pod \"nova-cell0-1ba0-account-create-c6g94\" (UID: \"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21\") " pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.535828 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1ba0-account-create-c6g94"] Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.556855 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.602934 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvdvw\" (UniqueName: \"kubernetes.io/projected/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21-kube-api-access-lvdvw\") pod \"nova-cell0-1ba0-account-create-c6g94\" (UID: \"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21\") " pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.626801 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvdvw\" (UniqueName: \"kubernetes.io/projected/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21-kube-api-access-lvdvw\") pod \"nova-cell0-1ba0-account-create-c6g94\" (UID: \"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21\") " pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.646345 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-2c0f-account-create-k2pkd"] Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.647433 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.650750 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.681714 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2c0f-account-create-k2pkd"] Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.709587 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9md9c\" (UniqueName: \"kubernetes.io/projected/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7-kube-api-access-9md9c\") pod \"nova-cell1-2c0f-account-create-k2pkd\" (UID: \"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7\") " pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.782492 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.813001 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9md9c\" (UniqueName: \"kubernetes.io/projected/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7-kube-api-access-9md9c\") pod \"nova-cell1-2c0f-account-create-k2pkd\" (UID: \"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7\") " pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.834654 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9md9c\" (UniqueName: \"kubernetes.io/projected/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7-kube-api-access-9md9c\") pod \"nova-cell1-2c0f-account-create-k2pkd\" (UID: \"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7\") " pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.980680 4636 generic.go:334] "Generic (PLEG): container finished" podID="02518a58-8609-4307-a40f-b5f4c752a20b" containerID="a107f74dd482df6555763c13d34a835b753956f8884f0a352f4dd3cb79637c8d" exitCode=0 Oct 02 21:44:38 crc kubenswrapper[4636]: I1002 21:44:38.981707 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerDied","Data":"a107f74dd482df6555763c13d34a835b753956f8884f0a352f4dd3cb79637c8d"} Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.013150 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.131540 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-45bc-account-create-q5dkm"] Oct 02 21:44:39 crc kubenswrapper[4636]: W1002 21:44:39.171956 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a99242c_2b7f_4ef3_85bf_e4f0a870fd25.slice/crio-fd3f0c3f7f984a3c4a89d9e00de15768bd3aad6c6071f462404402e3753fa9a6 WatchSource:0}: Error finding container fd3f0c3f7f984a3c4a89d9e00de15768bd3aad6c6071f462404402e3753fa9a6: Status 404 returned error can't find the container with id fd3f0c3f7f984a3c4a89d9e00de15768bd3aad6c6071f462404402e3753fa9a6 Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.192478 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.224866 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-scripts\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.224969 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-log-httpd\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.225000 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-run-httpd\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.225122 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cjq4\" (UniqueName: \"kubernetes.io/projected/02518a58-8609-4307-a40f-b5f4c752a20b-kube-api-access-8cjq4\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.225154 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-config-data\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.225211 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-sg-core-conf-yaml\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.225299 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-combined-ca-bundle\") pod \"02518a58-8609-4307-a40f-b5f4c752a20b\" (UID: \"02518a58-8609-4307-a40f-b5f4c752a20b\") " Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.228457 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.229129 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.231735 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-scripts" (OuterVolumeSpecName: "scripts") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.243953 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02518a58-8609-4307-a40f-b5f4c752a20b-kube-api-access-8cjq4" (OuterVolumeSpecName: "kube-api-access-8cjq4") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "kube-api-access-8cjq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.288497 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.329392 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.329428 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.329439 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02518a58-8609-4307-a40f-b5f4c752a20b-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.329451 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cjq4\" (UniqueName: \"kubernetes.io/projected/02518a58-8609-4307-a40f-b5f4c752a20b-kube-api-access-8cjq4\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.329463 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.362528 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.432455 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.439188 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-config-data" (OuterVolumeSpecName: "config-data") pod "02518a58-8609-4307-a40f-b5f4c752a20b" (UID: "02518a58-8609-4307-a40f-b5f4c752a20b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.536293 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02518a58-8609-4307-a40f-b5f4c752a20b-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.566010 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1ba0-account-create-c6g94"] Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.675713 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2c0f-account-create-k2pkd"] Oct 02 21:44:39 crc kubenswrapper[4636]: W1002 21:44:39.681360 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2df6b1a1_db69_4eaa_a32e_2ba717d9eba7.slice/crio-6451dec40b04f4a4c01252c2926e30f23af2be048bb90710b6815778951f71e9 WatchSource:0}: Error finding container 6451dec40b04f4a4c01252c2926e30f23af2be048bb90710b6815778951f71e9: Status 404 returned error can't find the container with id 6451dec40b04f4a4c01252c2926e30f23af2be048bb90710b6815778951f71e9 Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.999318 4636 generic.go:334] "Generic (PLEG): container finished" podID="2df6b1a1-db69-4eaa-a32e-2ba717d9eba7" containerID="3ca210cd4e4ad1fc39c46e49750894bfb0e5bd4218ec01737b4ac838371f5221" exitCode=0 Oct 02 21:44:39 crc kubenswrapper[4636]: I1002 21:44:39.999418 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" event={"ID":"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7","Type":"ContainerDied","Data":"3ca210cd4e4ad1fc39c46e49750894bfb0e5bd4218ec01737b4ac838371f5221"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:39.999474 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" event={"ID":"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7","Type":"ContainerStarted","Data":"6451dec40b04f4a4c01252c2926e30f23af2be048bb90710b6815778951f71e9"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.001854 4636 generic.go:334] "Generic (PLEG): container finished" podID="d5b83cd7-4b9a-40b3-9e66-af7c691c2a21" containerID="47b8fcd0901d0adc7fae19fd4cf2a595883d1907113df97f0f0da969324143ce" exitCode=0 Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.001963 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1ba0-account-create-c6g94" event={"ID":"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21","Type":"ContainerDied","Data":"47b8fcd0901d0adc7fae19fd4cf2a595883d1907113df97f0f0da969324143ce"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.002015 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1ba0-account-create-c6g94" 
event={"ID":"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21","Type":"ContainerStarted","Data":"62e1a19a24cb358d696bcc13be090b8ae2fb9c52357594aa545f25fb029c0201"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.004101 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a99242c-2b7f-4ef3-85bf-e4f0a870fd25" containerID="04d8510dedeed1780ec92f6df059109baad7c3ac86af080906dbc28b100b262b" exitCode=0 Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.004155 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-45bc-account-create-q5dkm" event={"ID":"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25","Type":"ContainerDied","Data":"04d8510dedeed1780ec92f6df059109baad7c3ac86af080906dbc28b100b262b"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.004173 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-45bc-account-create-q5dkm" event={"ID":"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25","Type":"ContainerStarted","Data":"fd3f0c3f7f984a3c4a89d9e00de15768bd3aad6c6071f462404402e3753fa9a6"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.006514 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02518a58-8609-4307-a40f-b5f4c752a20b","Type":"ContainerDied","Data":"9a48328728f502468b54ef65ff007744cc27809dd4272837f6738b9d2fa8c7df"} Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.006541 4636 scope.go:117] "RemoveContainer" containerID="d35436f4ae0c5f959e14eb102c9efdbf5b23e394b28625c501fabd282e48bef3" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.006653 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.029543 4636 scope.go:117] "RemoveContainer" containerID="a9e60c3abc2d3b2585a1e3e9413487d89625505cee4b8faefea84bc21294623e" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.052931 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.065895 4636 scope.go:117] "RemoveContainer" containerID="19d63bc424331622c2b10912d4d245c03857f79e8143272c52ea941e13b08d54" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.068917 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.093832 4636 scope.go:117] "RemoveContainer" containerID="a107f74dd482df6555763c13d34a835b753956f8884f0a352f4dd3cb79637c8d" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.103279 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:40 crc kubenswrapper[4636]: E1002 21:44:40.103669 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="sg-core" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.103685 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="sg-core" Oct 02 21:44:40 crc kubenswrapper[4636]: E1002 21:44:40.103720 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-central-agent" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.103728 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-central-agent" Oct 02 21:44:40 crc kubenswrapper[4636]: E1002 21:44:40.103747 4636 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="proxy-httpd" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.103753 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="proxy-httpd" Oct 02 21:44:40 crc kubenswrapper[4636]: E1002 21:44:40.103766 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-notification-agent" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.103870 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-notification-agent" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.104043 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="proxy-httpd" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.104061 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-notification-agent" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.104074 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="ceilometer-central-agent" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.104080 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" containerName="sg-core" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.105820 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.108203 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.108947 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.110221 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146277 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-config-data\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146437 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-log-httpd\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146508 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146551 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-scripts\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146571 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146609 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-run-httpd\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.146642 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h896\" (UniqueName: \"kubernetes.io/projected/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-kube-api-access-4h896\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.247936 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-log-httpd\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248294 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248332 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-scripts\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248349 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248371 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-run-httpd\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248400 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h896\" (UniqueName: \"kubernetes.io/projected/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-kube-api-access-4h896\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248416 4636 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-log-httpd\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248554 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-config-data\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.248807 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-run-httpd\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.254557 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.254764 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-config-data\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.254905 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.262757 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-scripts\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.266308 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h896\" (UniqueName: \"kubernetes.io/projected/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-kube-api-access-4h896\") pod \"ceilometer-0\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.427790 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:44:40 crc kubenswrapper[4636]: I1002 21:44:40.874195 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.015513 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerStarted","Data":"d1c13fd6e3b056b96012d909ae55dc7d0868cdf1a437c2962e028b3a22360c1e"} Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.322181 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.373131 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvdvw\" (UniqueName: \"kubernetes.io/projected/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21-kube-api-access-lvdvw\") pod \"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21\" (UID: \"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21\") " Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.379428 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21-kube-api-access-lvdvw" (OuterVolumeSpecName: "kube-api-access-lvdvw") pod "d5b83cd7-4b9a-40b3-9e66-af7c691c2a21" (UID: "d5b83cd7-4b9a-40b3-9e66-af7c691c2a21"). InnerVolumeSpecName "kube-api-access-lvdvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.403119 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.476116 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg9f5\" (UniqueName: \"kubernetes.io/projected/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25-kube-api-access-cg9f5\") pod \"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25\" (UID: \"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25\") " Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.476674 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvdvw\" (UniqueName: \"kubernetes.io/projected/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21-kube-api-access-lvdvw\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.495488 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25-kube-api-access-cg9f5" (OuterVolumeSpecName: "kube-api-access-cg9f5") pod "7a99242c-2b7f-4ef3-85bf-e4f0a870fd25" (UID: "7a99242c-2b7f-4ef3-85bf-e4f0a870fd25"). InnerVolumeSpecName "kube-api-access-cg9f5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.578181 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg9f5\" (UniqueName: \"kubernetes.io/projected/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25-kube-api-access-cg9f5\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.608338 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.628241 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02518a58-8609-4307-a40f-b5f4c752a20b" path="/var/lib/kubelet/pods/02518a58-8609-4307-a40f-b5f4c752a20b/volumes" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.679476 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9md9c\" (UniqueName: \"kubernetes.io/projected/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7-kube-api-access-9md9c\") pod \"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7\" (UID: \"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7\") " Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.690994 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7-kube-api-access-9md9c" (OuterVolumeSpecName: "kube-api-access-9md9c") pod "2df6b1a1-db69-4eaa-a32e-2ba717d9eba7" (UID: "2df6b1a1-db69-4eaa-a32e-2ba717d9eba7"). InnerVolumeSpecName "kube-api-access-9md9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:44:41 crc kubenswrapper[4636]: I1002 21:44:41.781784 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9md9c\" (UniqueName: \"kubernetes.io/projected/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7-kube-api-access-9md9c\") on node \"crc\" DevicePath \"\"" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.025209 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" event={"ID":"2df6b1a1-db69-4eaa-a32e-2ba717d9eba7","Type":"ContainerDied","Data":"6451dec40b04f4a4c01252c2926e30f23af2be048bb90710b6815778951f71e9"} Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.025519 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6451dec40b04f4a4c01252c2926e30f23af2be048bb90710b6815778951f71e9" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.025249 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2c0f-account-create-k2pkd" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.027673 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1ba0-account-create-c6g94" event={"ID":"d5b83cd7-4b9a-40b3-9e66-af7c691c2a21","Type":"ContainerDied","Data":"62e1a19a24cb358d696bcc13be090b8ae2fb9c52357594aa545f25fb029c0201"} Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.027724 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1ba0-account-create-c6g94" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.027713 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62e1a19a24cb358d696bcc13be090b8ae2fb9c52357594aa545f25fb029c0201" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.029213 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerStarted","Data":"81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385"} Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.031332 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-45bc-account-create-q5dkm" event={"ID":"7a99242c-2b7f-4ef3-85bf-e4f0a870fd25","Type":"ContainerDied","Data":"fd3f0c3f7f984a3c4a89d9e00de15768bd3aad6c6071f462404402e3753fa9a6"} Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.031360 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd3f0c3f7f984a3c4a89d9e00de15768bd3aad6c6071f462404402e3753fa9a6" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.031369 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-45bc-account-create-q5dkm" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.709247 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.709294 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.737901 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 02 21:44:42 crc kubenswrapper[4636]: I1002 21:44:42.748956 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.039672 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerStarted","Data":"6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f"} Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.040427 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.040579 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.701065 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xdk7c"] Oct 02 21:44:43 crc kubenswrapper[4636]: E1002 21:44:43.701431 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2df6b1a1-db69-4eaa-a32e-2ba717d9eba7" containerName="mariadb-account-create" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.701446 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2df6b1a1-db69-4eaa-a32e-2ba717d9eba7" containerName="mariadb-account-create" Oct 02 21:44:43 crc kubenswrapper[4636]: E1002 21:44:43.701462 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a99242c-2b7f-4ef3-85bf-e4f0a870fd25" containerName="mariadb-account-create" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 
Oct 02 21:44:43 crc kubenswrapper[4636]: E1002 21:44:43.701493 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5b83cd7-4b9a-40b3-9e66-af7c691c2a21" containerName="mariadb-account-create"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.701500 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5b83cd7-4b9a-40b3-9e66-af7c691c2a21" containerName="mariadb-account-create"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.701672 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2df6b1a1-db69-4eaa-a32e-2ba717d9eba7" containerName="mariadb-account-create"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.701690 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5b83cd7-4b9a-40b3-9e66-af7c691c2a21" containerName="mariadb-account-create"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.701701 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a99242c-2b7f-4ef3-85bf-e4f0a870fd25" containerName="mariadb-account-create"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.702261 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xdk7c"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.716159 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xdk7c"]
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.723070 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.723248 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.723352 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-95zkw"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.818519 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-config-data\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.818822 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sptdj\" (UniqueName: \"kubernetes.io/projected/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-kube-api-access-sptdj\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.818952 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-scripts\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c"
Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.818987 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c"
\"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.920093 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-config-data\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.920143 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sptdj\" (UniqueName: \"kubernetes.io/projected/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-kube-api-access-sptdj\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.920229 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-scripts\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.920256 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.926735 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-scripts\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.926926 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-config-data\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.940353 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:43 crc kubenswrapper[4636]: I1002 21:44:43.949280 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sptdj\" (UniqueName: \"kubernetes.io/projected/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-kube-api-access-sptdj\") pod \"nova-cell0-conductor-db-sync-xdk7c\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:44 crc kubenswrapper[4636]: I1002 21:44:44.032782 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:44:44 crc kubenswrapper[4636]: I1002 21:44:44.059884 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerStarted","Data":"a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c"} Oct 02 21:44:44 crc kubenswrapper[4636]: I1002 21:44:44.561608 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xdk7c"] Oct 02 21:44:44 crc kubenswrapper[4636]: W1002 21:44:44.575621 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda97ee2c8_04de_4293_b7f3_eb9ed870c4b5.slice/crio-df89dceeb2b85fd35eaa568abd4fe00cc38b0f1e023df3eb801929a175d9d29f WatchSource:0}: Error finding container df89dceeb2b85fd35eaa568abd4fe00cc38b0f1e023df3eb801929a175d9d29f: Status 404 returned error can't find the container with id df89dceeb2b85fd35eaa568abd4fe00cc38b0f1e023df3eb801929a175d9d29f Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.069888 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerStarted","Data":"fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b"} Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.070208 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.071246 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" event={"ID":"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5","Type":"ContainerStarted","Data":"df89dceeb2b85fd35eaa568abd4fe00cc38b0f1e023df3eb801929a175d9d29f"} Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.098015 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.529418049 podStartE2EDuration="5.097994099s" podCreationTimestamp="2025-10-02 21:44:40 +0000 UTC" firstStartedPulling="2025-10-02 21:44:40.878633888 +0000 UTC m=+1272.201641907" lastFinishedPulling="2025-10-02 21:44:44.447209938 +0000 UTC m=+1275.770217957" observedRunningTime="2025-10-02 21:44:45.094063496 +0000 UTC m=+1276.417071505" watchObservedRunningTime="2025-10-02 21:44:45.097994099 +0000 UTC m=+1276.421002108" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.211136 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.211182 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.251689 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.263426 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.590002 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 02 21:44:45 crc kubenswrapper[4636]: I1002 21:44:45.590457 4636 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 21:44:45 
Oct 02 21:44:46 crc kubenswrapper[4636]: I1002 21:44:46.089878 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 02 21:44:46 crc kubenswrapper[4636]: I1002 21:44:46.089906 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 02 21:44:47 crc kubenswrapper[4636]: I1002 21:44:47.889312 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 02 21:44:47 crc kubenswrapper[4636]: I1002 21:44:47.890435 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 02 21:44:49 crc kubenswrapper[4636]: I1002 21:44:49.129831 4636 generic.go:334] "Generic (PLEG): container finished" podID="65063729-cda3-488f-8e94-364db15e2d2d" containerID="0d50d981993a8c7a14f5be874fc82c71a331452cefb9991b2bc42383c51602c1" exitCode=137
Oct 02 21:44:49 crc kubenswrapper[4636]: I1002 21:44:49.129870 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerDied","Data":"0d50d981993a8c7a14f5be874fc82c71a331452cefb9991b2bc42383c51602c1"}
Oct 02 21:44:49 crc kubenswrapper[4636]: I1002 21:44:49.130256 4636 scope.go:117] "RemoveContainer" containerID="3cd88a149e153dcf509a1fbfa49ed066efedce2a10516a990ddad09a7052ca1e"
Oct 02 21:44:53 crc kubenswrapper[4636]: I1002 21:44:53.117528 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 02 21:44:53 crc kubenswrapper[4636]: I1002 21:44:53.118013 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 02 21:44:53 crc kubenswrapper[4636]: I1002 21:44:53.163534 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" event={"ID":"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5","Type":"ContainerStarted","Data":"9a4285b349cc1212dce26f7a49020db89ac85f45c91dd6c98e1dc70c316e2bba"}
Oct 02 21:44:53 crc kubenswrapper[4636]: I1002 21:44:53.166758 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7646d88f4d-85mgl" event={"ID":"65063729-cda3-488f-8e94-364db15e2d2d","Type":"ContainerStarted","Data":"f94e7a4d9899b8882b163af5ee9d4c2348e17f8166fa54dbacb0c7c54a5bb01b"}
Oct 02 21:44:53 crc kubenswrapper[4636]: I1002 21:44:53.205051 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" podStartSLOduration=2.335942916 podStartE2EDuration="10.205030826s" podCreationTimestamp="2025-10-02 21:44:43 +0000 UTC" firstStartedPulling="2025-10-02 21:44:44.578441165 +0000 UTC m=+1275.901449184" lastFinishedPulling="2025-10-02 21:44:52.447529075 +0000 UTC m=+1283.770537094" observedRunningTime="2025-10-02 21:44:53.177897272 +0000 UTC m=+1284.500905301" watchObservedRunningTime="2025-10-02 21:44:53.205030826 +0000 UTC m=+1284.528038845"
m=+1284.500905301" watchObservedRunningTime="2025-10-02 21:44:53.205030826 +0000 UTC m=+1284.528038845" Oct 02 21:44:58 crc kubenswrapper[4636]: I1002 21:44:58.475417 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:44:58 crc kubenswrapper[4636]: I1002 21:44:58.475965 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7646d88f4d-85mgl" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.155633 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx"] Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.159924 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.163795 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.164068 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.223712 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx"] Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.281024 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/115f6fcc-7689-4d2e-b995-09c5c884ae27-secret-volume\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.281154 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/115f6fcc-7689-4d2e-b995-09c5c884ae27-config-volume\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.281256 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-455xh\" (UniqueName: \"kubernetes.io/projected/115f6fcc-7689-4d2e-b995-09c5c884ae27-kube-api-access-455xh\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.382988 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/115f6fcc-7689-4d2e-b995-09c5c884ae27-secret-volume\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.383098 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/115f6fcc-7689-4d2e-b995-09c5c884ae27-config-volume\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.383147 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-455xh\" (UniqueName: \"kubernetes.io/projected/115f6fcc-7689-4d2e-b995-09c5c884ae27-kube-api-access-455xh\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.384152 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/115f6fcc-7689-4d2e-b995-09c5c884ae27-config-volume\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.400368 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-455xh\" (UniqueName: \"kubernetes.io/projected/115f6fcc-7689-4d2e-b995-09c5c884ae27-kube-api-access-455xh\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.403935 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/115f6fcc-7689-4d2e-b995-09c5c884ae27-secret-volume\") pod \"collect-profiles-29324025-rthvx\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.484227 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:00 crc kubenswrapper[4636]: I1002 21:45:00.940774 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx"] Oct 02 21:45:01 crc kubenswrapper[4636]: I1002 21:45:01.252801 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" event={"ID":"115f6fcc-7689-4d2e-b995-09c5c884ae27","Type":"ContainerStarted","Data":"af80d547d133989fcbc32a03b8eaeb17f07aa37b792cfa5565d9f2eddc50940c"} Oct 02 21:45:01 crc kubenswrapper[4636]: I1002 21:45:01.253127 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" event={"ID":"115f6fcc-7689-4d2e-b995-09c5c884ae27","Type":"ContainerStarted","Data":"ccf2c88a5d1ead63d49bce27e6b37920e2a49198a3808afa619b063335252b7b"} Oct 02 21:45:02 crc kubenswrapper[4636]: I1002 21:45:02.262473 4636 generic.go:334] "Generic (PLEG): container finished" podID="115f6fcc-7689-4d2e-b995-09c5c884ae27" containerID="af80d547d133989fcbc32a03b8eaeb17f07aa37b792cfa5565d9f2eddc50940c" exitCode=0 Oct 02 21:45:02 crc kubenswrapper[4636]: I1002 21:45:02.262799 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" event={"ID":"115f6fcc-7689-4d2e-b995-09c5c884ae27","Type":"ContainerDied","Data":"af80d547d133989fcbc32a03b8eaeb17f07aa37b792cfa5565d9f2eddc50940c"} Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.600004 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.743133 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/115f6fcc-7689-4d2e-b995-09c5c884ae27-secret-volume\") pod \"115f6fcc-7689-4d2e-b995-09c5c884ae27\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.743193 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/115f6fcc-7689-4d2e-b995-09c5c884ae27-config-volume\") pod \"115f6fcc-7689-4d2e-b995-09c5c884ae27\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.743378 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-455xh\" (UniqueName: \"kubernetes.io/projected/115f6fcc-7689-4d2e-b995-09c5c884ae27-kube-api-access-455xh\") pod \"115f6fcc-7689-4d2e-b995-09c5c884ae27\" (UID: \"115f6fcc-7689-4d2e-b995-09c5c884ae27\") " Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.743996 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/115f6fcc-7689-4d2e-b995-09c5c884ae27-config-volume" (OuterVolumeSpecName: "config-volume") pod "115f6fcc-7689-4d2e-b995-09c5c884ae27" (UID: "115f6fcc-7689-4d2e-b995-09c5c884ae27"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.749588 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/115f6fcc-7689-4d2e-b995-09c5c884ae27-kube-api-access-455xh" (OuterVolumeSpecName: "kube-api-access-455xh") pod "115f6fcc-7689-4d2e-b995-09c5c884ae27" (UID: "115f6fcc-7689-4d2e-b995-09c5c884ae27"). InnerVolumeSpecName "kube-api-access-455xh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.763546 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/115f6fcc-7689-4d2e-b995-09c5c884ae27-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "115f6fcc-7689-4d2e-b995-09c5c884ae27" (UID: "115f6fcc-7689-4d2e-b995-09c5c884ae27"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.846007 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/115f6fcc-7689-4d2e-b995-09c5c884ae27-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.846045 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/115f6fcc-7689-4d2e-b995-09c5c884ae27-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:03 crc kubenswrapper[4636]: I1002 21:45:03.846059 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-455xh\" (UniqueName: \"kubernetes.io/projected/115f6fcc-7689-4d2e-b995-09c5c884ae27-kube-api-access-455xh\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:04 crc kubenswrapper[4636]: I1002 21:45:04.285565 4636 generic.go:334] "Generic (PLEG): container finished" podID="a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" containerID="9a4285b349cc1212dce26f7a49020db89ac85f45c91dd6c98e1dc70c316e2bba" exitCode=0 Oct 02 21:45:04 crc kubenswrapper[4636]: I1002 21:45:04.286020 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" event={"ID":"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5","Type":"ContainerDied","Data":"9a4285b349cc1212dce26f7a49020db89ac85f45c91dd6c98e1dc70c316e2bba"} Oct 02 21:45:04 crc kubenswrapper[4636]: I1002 21:45:04.289027 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" event={"ID":"115f6fcc-7689-4d2e-b995-09c5c884ae27","Type":"ContainerDied","Data":"ccf2c88a5d1ead63d49bce27e6b37920e2a49198a3808afa619b063335252b7b"} Oct 02 21:45:04 crc kubenswrapper[4636]: I1002 21:45:04.289081 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccf2c88a5d1ead63d49bce27e6b37920e2a49198a3808afa619b063335252b7b" Oct 02 21:45:04 crc kubenswrapper[4636]: I1002 21:45:04.289143 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.646714 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.775992 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-config-data\") pod \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.776132 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sptdj\" (UniqueName: \"kubernetes.io/projected/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-kube-api-access-sptdj\") pod \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.776343 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle\") pod \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.776902 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-scripts\") pod \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\" (UID: \"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5\") " Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.781705 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-kube-api-access-sptdj" (OuterVolumeSpecName: "kube-api-access-sptdj") pod "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" (UID: "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5"). InnerVolumeSpecName "kube-api-access-sptdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.790872 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-scripts" (OuterVolumeSpecName: "scripts") pod "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" (UID: "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.806150 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" (UID: "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.816910 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-config-data" (OuterVolumeSpecName: "config-data") pod "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" (UID: "a97ee2c8-04de-4293-b7f3-eb9ed870c4b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.879537 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.879567 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.879576 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:05 crc kubenswrapper[4636]: I1002 21:45:05.879585 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sptdj\" (UniqueName: \"kubernetes.io/projected/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5-kube-api-access-sptdj\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.312804 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" event={"ID":"a97ee2c8-04de-4293-b7f3-eb9ed870c4b5","Type":"ContainerDied","Data":"df89dceeb2b85fd35eaa568abd4fe00cc38b0f1e023df3eb801929a175d9d29f"} Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.313323 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df89dceeb2b85fd35eaa568abd4fe00cc38b0f1e023df3eb801929a175d9d29f" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.313080 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xdk7c" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.424874 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 02 21:45:06 crc kubenswrapper[4636]: E1002 21:45:06.425241 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115f6fcc-7689-4d2e-b995-09c5c884ae27" containerName="collect-profiles" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.425256 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="115f6fcc-7689-4d2e-b995-09c5c884ae27" containerName="collect-profiles" Oct 02 21:45:06 crc kubenswrapper[4636]: E1002 21:45:06.425279 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" containerName="nova-cell0-conductor-db-sync" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.425286 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" containerName="nova-cell0-conductor-db-sync" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.425431 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="115f6fcc-7689-4d2e-b995-09c5c884ae27" containerName="collect-profiles" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.425450 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" containerName="nova-cell0-conductor-db-sync" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.426019 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.429807 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-95zkw" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.430618 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.448522 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.490452 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmtxm\" (UniqueName: \"kubernetes.io/projected/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-kube-api-access-nmtxm\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.490547 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.490644 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.592262 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmtxm\" (UniqueName: \"kubernetes.io/projected/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-kube-api-access-nmtxm\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.592333 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.592401 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.597240 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.608832 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.609519 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmtxm\" (UniqueName: \"kubernetes.io/projected/6d5c1a37-2e01-45da-8dc5-06d98b5b07ee-kube-api-access-nmtxm\") pod \"nova-cell0-conductor-0\" (UID: \"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee\") " pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:06 crc kubenswrapper[4636]: I1002 21:45:06.745915 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:07 crc kubenswrapper[4636]: I1002 21:45:07.305126 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 02 21:45:07 crc kubenswrapper[4636]: I1002 21:45:07.341070 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee","Type":"ContainerStarted","Data":"df27ed975438c36b334c44d402b0385d9f6e17327190f87f443220047a530bb8"} Oct 02 21:45:08 crc kubenswrapper[4636]: I1002 21:45:08.352617 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6d5c1a37-2e01-45da-8dc5-06d98b5b07ee","Type":"ContainerStarted","Data":"05b18c30888d7d4e9a2853a3842d5a652d5eb74d2008d797881dbb985fa10984"} Oct 02 21:45:08 crc kubenswrapper[4636]: I1002 21:45:08.353058 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:08 crc kubenswrapper[4636]: I1002 21:45:08.391986 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.391968548 podStartE2EDuration="2.391968548s" podCreationTimestamp="2025-10-02 21:45:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:08.387418978 +0000 UTC m=+1299.710426997" watchObservedRunningTime="2025-10-02 21:45:08.391968548 +0000 UTC m=+1299.714976567" Oct 02 21:45:08 crc kubenswrapper[4636]: I1002 21:45:08.477188 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7646d88f4d-85mgl" podUID="65063729-cda3-488f-8e94-364db15e2d2d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Oct 02 21:45:10 crc kubenswrapper[4636]: I1002 21:45:10.437294 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 02 21:45:14 crc kubenswrapper[4636]: I1002 21:45:14.486881 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 02 21:45:14 crc kubenswrapper[4636]: I1002 21:45:14.487785 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" containerName="kube-state-metrics" containerID="cri-o://ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3" gracePeriod=30 Oct 02 21:45:14 crc kubenswrapper[4636]: I1002 21:45:14.954290 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.096016 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ls8d8\" (UniqueName: \"kubernetes.io/projected/c1312598-8735-44c7-a810-4bb4c57e5fba-kube-api-access-ls8d8\") pod \"c1312598-8735-44c7-a810-4bb4c57e5fba\" (UID: \"c1312598-8735-44c7-a810-4bb4c57e5fba\") " Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.102358 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1312598-8735-44c7-a810-4bb4c57e5fba-kube-api-access-ls8d8" (OuterVolumeSpecName: "kube-api-access-ls8d8") pod "c1312598-8735-44c7-a810-4bb4c57e5fba" (UID: "c1312598-8735-44c7-a810-4bb4c57e5fba"). InnerVolumeSpecName "kube-api-access-ls8d8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.198829 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ls8d8\" (UniqueName: \"kubernetes.io/projected/c1312598-8735-44c7-a810-4bb4c57e5fba-kube-api-access-ls8d8\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.411985 4636 generic.go:334] "Generic (PLEG): container finished" podID="c1312598-8735-44c7-a810-4bb4c57e5fba" containerID="ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3" exitCode=2 Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.412031 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.412047 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c1312598-8735-44c7-a810-4bb4c57e5fba","Type":"ContainerDied","Data":"ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3"} Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.412094 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c1312598-8735-44c7-a810-4bb4c57e5fba","Type":"ContainerDied","Data":"cd9ddf3076f16007decd30bf0fd25f2bc9af1e8f613b5e000e4c94e131768821"} Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.412133 4636 scope.go:117] "RemoveContainer" containerID="ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3" Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.429632 4636 scope.go:117] "RemoveContainer" containerID="ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3" Oct 02 21:45:15 crc kubenswrapper[4636]: E1002 21:45:15.430106 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3\": container with ID starting with ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3 not found: ID does not exist" containerID="ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3" Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.430147 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3"} err="failed to get container status \"ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3\": rpc error: code = NotFound desc = could not find container \"ace9310fcaf475a413c8837e09d8bf25c2ec7e180fb1bbf8b8d314714c8c1eb3\": container with ID starting with 
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.442813 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.451605 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.471433 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 02 21:45:15 crc kubenswrapper[4636]: E1002 21:45:15.472010 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" containerName="kube-state-metrics"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.472026 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" containerName="kube-state-metrics"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.472181 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" containerName="kube-state-metrics"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.472806 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.474608 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.485836 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.493246 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.604406 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.604471 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.604498 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vrc6\" (UniqueName: \"kubernetes.io/projected/cb2c4720-c639-416e-9217-f53ae67509e9-kube-api-access-9vrc6\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.604551 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.613727 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1312598-8735-44c7-a810-4bb4c57e5fba" path="/var/lib/kubelet/pods/c1312598-8735-44c7-a810-4bb4c57e5fba/volumes"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.706989 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.707093 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.707115 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vrc6\" (UniqueName: \"kubernetes.io/projected/cb2c4720-c639-416e-9217-f53ae67509e9-kube-api-access-9vrc6\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.707174 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.711056 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.711690 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.715510 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb2c4720-c639-416e-9217-f53ae67509e9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.732468 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vrc6\" (UniqueName: \"kubernetes.io/projected/cb2c4720-c639-416e-9217-f53ae67509e9-kube-api-access-9vrc6\") pod \"kube-state-metrics-0\" (UID: \"cb2c4720-c639-416e-9217-f53ae67509e9\") " pod="openstack/kube-state-metrics-0"
Oct 02 21:45:15 crc kubenswrapper[4636]: I1002 21:45:15.789583 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.229710 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.421601 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cb2c4720-c639-416e-9217-f53ae67509e9","Type":"ContainerStarted","Data":"349701057e6e6e9bb82733fc669d30478048b24c057791d224085bed7c1d6e20"} Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.438180 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.438490 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-central-agent" containerID="cri-o://81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385" gracePeriod=30 Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.438554 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="sg-core" containerID="cri-o://a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c" gracePeriod=30 Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.438639 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-notification-agent" containerID="cri-o://6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f" gracePeriod=30 Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.438801 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="proxy-httpd" containerID="cri-o://fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b" gracePeriod=30 Oct 02 21:45:16 crc kubenswrapper[4636]: I1002 21:45:16.793546 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.308812 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-bf96z"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.315399 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.319286 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.319603 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.338493 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-bf96z"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.444453 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-config-data\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.444522 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.444611 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjw8g\" (UniqueName: \"kubernetes.io/projected/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-kube-api-access-xjw8g\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.444634 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-scripts\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.460175 4636 generic.go:334] "Generic (PLEG): container finished" podID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerID="fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b" exitCode=0 Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.460203 4636 generic.go:334] "Generic (PLEG): container finished" podID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerID="a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c" exitCode=2 Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.460210 4636 generic.go:334] "Generic (PLEG): container finished" podID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerID="81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385" exitCode=0 Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.460246 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerDied","Data":"fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b"} Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.460272 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerDied","Data":"a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c"} Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.460281 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerDied","Data":"81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385"} Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.469029 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cb2c4720-c639-416e-9217-f53ae67509e9","Type":"ContainerStarted","Data":"79098f76f57d18d17e3e9374a4f74756cf731cb22022da8e760f287456c4d8d0"} Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.469686 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.478076 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.481730 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.487371 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.503448 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.514019 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.154016866 podStartE2EDuration="2.513997598s" podCreationTimestamp="2025-10-02 21:45:15 +0000 UTC" firstStartedPulling="2025-10-02 21:45:16.23423447 +0000 UTC m=+1307.557242489" lastFinishedPulling="2025-10-02 21:45:16.594215202 +0000 UTC m=+1307.917223221" observedRunningTime="2025-10-02 21:45:17.512099558 +0000 UTC m=+1308.835107577" watchObservedRunningTime="2025-10-02 21:45:17.513997598 +0000 UTC m=+1308.837005617" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.546468 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjw8g\" (UniqueName: \"kubernetes.io/projected/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-kube-api-access-xjw8g\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.546513 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-scripts\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.546566 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-config-data\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.546604 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.580369 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.597974 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-scripts\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.617874 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.619798 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.637187 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.648625 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-config-data\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.671968 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjw8g\" (UniqueName: \"kubernetes.io/projected/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-kube-api-access-xjw8g\") pod \"nova-cell0-cell-mapping-bf96z\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.683385 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.684425 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sfg5\" (UniqueName: \"kubernetes.io/projected/31d9d741-6c48-4f7a-9594-d01fc29e21e5-kube-api-access-2sfg5\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.684612 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-config-data\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.684685 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31d9d741-6c48-4f7a-9594-d01fc29e21e5-logs\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.685135 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.793810 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795011 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sfg5\" (UniqueName: \"kubernetes.io/projected/31d9d741-6c48-4f7a-9594-d01fc29e21e5-kube-api-access-2sfg5\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795043 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48j2c\" (UniqueName: \"kubernetes.io/projected/055cd6f8-9314-4513-b42e-9b309fb29469-kube-api-access-48j2c\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795067 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795084 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795110 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-config-data\") pod \"nova-api-0\" (UID: 
\"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795141 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31d9d741-6c48-4f7a-9594-d01fc29e21e5-logs\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.795160 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.806128 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31d9d741-6c48-4f7a-9594-d01fc29e21e5-logs\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.826341 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.826502 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-config-data\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.857544 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sfg5\" (UniqueName: \"kubernetes.io/projected/31d9d741-6c48-4f7a-9594-d01fc29e21e5-kube-api-access-2sfg5\") pod \"nova-api-0\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " pod="openstack/nova-api-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.882830 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.884484 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.890813 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.896680 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48j2c\" (UniqueName: \"kubernetes.io/projected/055cd6f8-9314-4513-b42e-9b309fb29469-kube-api-access-48j2c\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.896725 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.896746 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.905554 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.917821 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.918964 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.930344 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.933797 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.945264 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.945264 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.947888 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-jq4n9"] Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.950027 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.963268 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48j2c\" (UniqueName: \"kubernetes.io/projected/055cd6f8-9314-4513-b42e-9b309fb29469-kube-api-access-48j2c\") pod \"nova-cell1-novncproxy-0\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:17 crc kubenswrapper[4636]: I1002 21:45:17.981797 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-jq4n9"] Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012252 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4962\" (UniqueName: \"kubernetes.io/projected/81cf4b6f-f5b4-4907-9f45-9f9de8460688-kube-api-access-w4962\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012289 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-config-data\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012353 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81cf4b6f-f5b4-4907-9f45-9f9de8460688-logs\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012369 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012392 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vs7r\" (UniqueName: \"kubernetes.io/projected/6fa815d8-d589-44d7-970d-571b675084e1-kube-api-access-5vs7r\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012457 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-config-data\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.012485 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.115782 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-config\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.115886 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4962\" (UniqueName: \"kubernetes.io/projected/81cf4b6f-f5b4-4907-9f45-9f9de8460688-kube-api-access-w4962\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.115930 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-config-data\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.115952 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbxgt\" (UniqueName: \"kubernetes.io/projected/54b64571-ca07-484a-a2cc-e67fdffd5974-kube-api-access-tbxgt\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.115979 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116020 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116036 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116083 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81cf4b6f-f5b4-4907-9f45-9f9de8460688-logs\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116102 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116128 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vs7r\" (UniqueName: 
\"kubernetes.io/projected/6fa815d8-d589-44d7-970d-571b675084e1-kube-api-access-5vs7r\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116191 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-config-data\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116217 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.116254 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.117694 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81cf4b6f-f5b4-4907-9f45-9f9de8460688-logs\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.123445 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.123976 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-config-data\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.124461 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.124888 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-config-data\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.132064 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4962\" (UniqueName: \"kubernetes.io/projected/81cf4b6f-f5b4-4907-9f45-9f9de8460688-kube-api-access-w4962\") pod \"nova-metadata-0\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.140109 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.140189 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vs7r\" (UniqueName: \"kubernetes.io/projected/6fa815d8-d589-44d7-970d-571b675084e1-kube-api-access-5vs7r\") pod \"nova-scheduler-0\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.194513 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.218422 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.218476 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-config\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.218546 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbxgt\" (UniqueName: \"kubernetes.io/projected/54b64571-ca07-484a-a2cc-e67fdffd5974-kube-api-access-tbxgt\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.218573 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.218600 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.218615 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.219482 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.219992 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.220610 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.221948 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-config\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.231783 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.234390 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbxgt\" (UniqueName: \"kubernetes.io/projected/54b64571-ca07-484a-a2cc-e67fdffd5974-kube-api-access-tbxgt\") pod \"dnsmasq-dns-845d6d6f59-jq4n9\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.287400 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.305506 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.345128 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.541478 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-bf96z"] Oct 02 21:45:18 crc kubenswrapper[4636]: I1002 21:45:18.627693 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.013352 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.299696 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-jq4n9"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.312534 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.335997 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.513012 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rgdjq"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.514426 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.517913 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.518628 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.531893 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"055cd6f8-9314-4513-b42e-9b309fb29469","Type":"ContainerStarted","Data":"6310f7591ea3dee9ee7e9ae356723713acf585f129b9631e09a32d0c27c45ce6"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.533798 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" event={"ID":"54b64571-ca07-484a-a2cc-e67fdffd5974","Type":"ContainerStarted","Data":"043814a36f263dce238d1a98b0c9deb0ff707a8564abb1dc5de40a1813716549"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.553142 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rgdjq"] Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.553182 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerStarted","Data":"6e46252ff5fee7d515b97cdcbf6202a4e4c8401f92d65f823af597a4c7a452aa"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.559110 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"31d9d741-6c48-4f7a-9594-d01fc29e21e5","Type":"ContainerStarted","Data":"54155862ecaff3a842594c5c45b746d5dbe8436a61dbed97d863ef4d7ea0ce29"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.566995 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6fa815d8-d589-44d7-970d-571b675084e1","Type":"ContainerStarted","Data":"d5457ab33f893069450effd2a6569dcd7bf4a4f82f41c9a1676282af2a66acbb"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.571413 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bf96z" event={"ID":"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f","Type":"ContainerStarted","Data":"aa4bceb21d8082f758a061347f1b63bf2aae4180a56de7fb8940ee1fd9aa2183"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.571457 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bf96z" event={"ID":"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f","Type":"ContainerStarted","Data":"8045029b1a318ae64b3f918c52c6f8d4678047ba17c0342e911efd04288a14d8"} Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.600360 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-bf96z" podStartSLOduration=2.600341489 podStartE2EDuration="2.600341489s" podCreationTimestamp="2025-10-02 21:45:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:19.594774473 +0000 UTC m=+1310.917782492" watchObservedRunningTime="2025-10-02 21:45:19.600341489 +0000 UTC m=+1310.923349508" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.658326 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-config-data\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.658378 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-scripts\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.658432 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.658491 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnlqv\" (UniqueName: \"kubernetes.io/projected/3f81ffd6-ca6c-4848-bb80-576172c2f647-kube-api-access-gnlqv\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.768370 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-config-data\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.768465 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-scripts\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.768807 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.769074 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnlqv\" (UniqueName: \"kubernetes.io/projected/3f81ffd6-ca6c-4848-bb80-576172c2f647-kube-api-access-gnlqv\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.794382 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-scripts\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.794556 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-config-data\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.810771 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.822402 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnlqv\" (UniqueName: \"kubernetes.io/projected/3f81ffd6-ca6c-4848-bb80-576172c2f647-kube-api-access-gnlqv\") pod \"nova-cell1-conductor-db-sync-rgdjq\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:19 crc kubenswrapper[4636]: I1002 21:45:19.852940 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.415668 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rgdjq"] Oct 02 21:45:20 crc kubenswrapper[4636]: W1002 21:45:20.432295 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f81ffd6_ca6c_4848_bb80_576172c2f647.slice/crio-23c439d8be55ae6957e8d9897f7877b9de190a85332f75cf213cb424fadf7c81 WatchSource:0}: Error finding container 23c439d8be55ae6957e8d9897f7877b9de190a85332f75cf213cb424fadf7c81: Status 404 returned error can't find the container with id 23c439d8be55ae6957e8d9897f7877b9de190a85332f75cf213cb424fadf7c81 Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.521836 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.600118 4636 generic.go:334] "Generic (PLEG): container finished" podID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerID="6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f" exitCode=0 Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.600184 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.600225 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerDied","Data":"6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f"} Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.600272 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"480e0adc-0bdc-4c2d-b7d3-b455eb678c66","Type":"ContainerDied","Data":"d1c13fd6e3b056b96012d909ae55dc7d0868cdf1a437c2962e028b3a22360c1e"} Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.600292 4636 scope.go:117] "RemoveContainer" containerID="fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.606395 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" event={"ID":"3f81ffd6-ca6c-4848-bb80-576172c2f647","Type":"ContainerStarted","Data":"23c439d8be55ae6957e8d9897f7877b9de190a85332f75cf213cb424fadf7c81"} Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.609452 4636 generic.go:334] "Generic (PLEG): container finished" podID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerID="8fdc7a3e01a9df48562d32c4a04a5e9f66d109b66f674cd13d81b202472e7b31" exitCode=0 Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.610267 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" event={"ID":"54b64571-ca07-484a-a2cc-e67fdffd5974","Type":"ContainerDied","Data":"8fdc7a3e01a9df48562d32c4a04a5e9f66d109b66f674cd13d81b202472e7b31"} Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708100 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-combined-ca-bundle\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708448 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-log-httpd\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708473 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-run-httpd\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708505 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-scripts\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708611 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-sg-core-conf-yaml\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708738 4636 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-4h896\" (UniqueName: \"kubernetes.io/projected/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-kube-api-access-4h896\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708787 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-config-data\") pod \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\" (UID: \"480e0adc-0bdc-4c2d-b7d3-b455eb678c66\") " Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.708403 4636 scope.go:117] "RemoveContainer" containerID="a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.710500 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.710727 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.726127 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-scripts" (OuterVolumeSpecName: "scripts") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.726650 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-kube-api-access-4h896" (OuterVolumeSpecName: "kube-api-access-4h896") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "kube-api-access-4h896". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.813022 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h896\" (UniqueName: \"kubernetes.io/projected/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-kube-api-access-4h896\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.813049 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.813058 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.813068 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.838204 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.867844 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.924321 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.924560 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:20 crc kubenswrapper[4636]: I1002 21:45:20.987513 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-config-data" (OuterVolumeSpecName: "config-data") pod "480e0adc-0bdc-4c2d-b7d3-b455eb678c66" (UID: "480e0adc-0bdc-4c2d-b7d3-b455eb678c66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.028961 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480e0adc-0bdc-4c2d-b7d3-b455eb678c66-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.037557 4636 scope.go:117] "RemoveContainer" containerID="6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.095081 4636 scope.go:117] "RemoveContainer" containerID="81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.172526 4636 scope.go:117] "RemoveContainer" containerID="fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b" Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.173017 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b\": container with ID starting with fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b not found: ID does not exist" containerID="fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.173047 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b"} err="failed to get container status \"fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b\": rpc error: code = NotFound desc = could not find container \"fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b\": container with ID starting with fa74edf01c9bfafbd77c091f72ca7294c81677b662f67c4bb1a97ab098ae756b not found: ID does not exist" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.173070 4636 scope.go:117] "RemoveContainer" containerID="a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c" Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.173286 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c\": container with ID starting with a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c not found: ID does not exist" containerID="a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.173304 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c"} err="failed to get container status \"a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c\": rpc error: code = NotFound desc = could not find container \"a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c\": container with ID starting with a5e512a8d1fbacbb846297a4785fcd8a7dee11939cb7dcc167e3243aff1bb93c not found: ID does not exist" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.173319 4636 scope.go:117] "RemoveContainer" containerID="6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f" Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.174000 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f\": container with ID starting with 6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f not found: ID does not exist" containerID="6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.174050 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f"} err="failed to get container status \"6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f\": rpc error: code = NotFound desc = could not find container \"6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f\": container with ID starting with 6ba335a9fc630ac1a6dadb935377a6b92ac546b8507c1b4e28378ffe65edcb4f not found: ID does not exist" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.174077 4636 scope.go:117] "RemoveContainer" containerID="81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385" Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.174442 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385\": container with ID starting with 81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385 not found: ID does not exist" containerID="81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.174486 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385"} err="failed to get container status \"81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385\": rpc error: code = NotFound desc = could not find container \"81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385\": container with ID starting with 81895fbb3a6e122b0976ada9b67748062f350cdb5b1520dfd52b4178e3e63385 not found: ID does not exist" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.278296 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.294122 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.310416 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.321987 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.322406 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="proxy-httpd" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322419 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="proxy-httpd" Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.322446 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-notification-agent" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322453 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-notification-agent" Oct 02 21:45:21 crc 
kubenswrapper[4636]: E1002 21:45:21.322477 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="sg-core" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322483 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="sg-core" Oct 02 21:45:21 crc kubenswrapper[4636]: E1002 21:45:21.322497 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-central-agent" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322503 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-central-agent" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322679 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-notification-agent" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322690 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="ceilometer-central-agent" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322711 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="sg-core" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.322725 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" containerName="proxy-httpd" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.324365 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.327801 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.328005 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.335490 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.338403 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.360018 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435429 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435473 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435501 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-run-httpd\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435534 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435580 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-log-httpd\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435603 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-scripts\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435625 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-config-data\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.435659 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4t8j\" (UniqueName: \"kubernetes.io/projected/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-kube-api-access-v4t8j\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537387 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4t8j\" (UniqueName: \"kubernetes.io/projected/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-kube-api-access-v4t8j\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537472 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537493 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537522 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-run-httpd\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0" Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537556 4636 
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537605 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-log-httpd\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537628 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-scripts\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.537647 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-config-data\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.538341 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-run-httpd\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.538419 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-log-httpd\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.546634 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.547876 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.550292 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-scripts\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.552466 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-config-data\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.562949 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4t8j\" (UniqueName: \"kubernetes.io/projected/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-kube-api-access-v4t8j\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.564052 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.641200 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.656727 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="480e0adc-0bdc-4c2d-b7d3-b455eb678c66" path="/var/lib/kubelet/pods/480e0adc-0bdc-4c2d-b7d3-b455eb678c66/volumes"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.661176 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" event={"ID":"3f81ffd6-ca6c-4848-bb80-576172c2f647","Type":"ContainerStarted","Data":"2907d819a6103e436640d6c422e993c2481d38378bf8e0961c8980e3edc96ddb"}
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.698520 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" podStartSLOduration=2.698496691 podStartE2EDuration="2.698496691s" podCreationTimestamp="2025-10-02 21:45:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:21.695741909 +0000 UTC m=+1313.018749928" watchObservedRunningTime="2025-10-02 21:45:21.698496691 +0000 UTC m=+1313.021504700"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.710307 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" event={"ID":"54b64571-ca07-484a-a2cc-e67fdffd5974","Type":"ContainerStarted","Data":"9f2bdc2dac0842e30cb4b34728f2a5fdd78cd49c99a261a805013e9309e2142d"}
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.710362 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9"
Oct 02 21:45:21 crc kubenswrapper[4636]: I1002 21:45:21.763232 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" podStartSLOduration=4.763217046 podStartE2EDuration="4.763217046s" podCreationTimestamp="2025-10-02 21:45:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:21.760120324 +0000 UTC m=+1313.083128343" watchObservedRunningTime="2025-10-02 21:45:21.763217046 +0000 UTC m=+1313.086225065"
Oct 02 21:45:22 crc kubenswrapper[4636]: I1002 21:45:22.039648 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7646d88f4d-85mgl"
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.117558 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.117607 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.117649 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr"
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.118436 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8eca633a881fe1c5c0ea771b3040511454688e9d05e626e17792bacf3c7ae736"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.118485 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://8eca633a881fe1c5c0ea771b3040511454688e9d05e626e17792bacf3c7ae736" gracePeriod=600
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.731776 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="8eca633a881fe1c5c0ea771b3040511454688e9d05e626e17792bacf3c7ae736" exitCode=0
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.731858 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"8eca633a881fe1c5c0ea771b3040511454688e9d05e626e17792bacf3c7ae736"}
Oct 02 21:45:23 crc kubenswrapper[4636]: I1002 21:45:23.732386 4636 scope.go:117] "RemoveContainer" containerID="04eab656ee8c7baf8b67922349c9645220b7ee9b1e0b335d95acde2eb540ab5f"
Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.281671 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.378536 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7646d88f4d-85mgl"
Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.462261 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6897cb4484-tthsj"]
Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.462982 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon-log" containerID="cri-o://bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2" gracePeriod=30
Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.463593 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" containerID="cri-o://8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8" gracePeriod=30
Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.759913 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerStarted","Data":"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d"}
event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerStarted","Data":"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.759961 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerStarted","Data":"87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.760275 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-log" containerID="cri-o://87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2" gracePeriod=30 Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.760795 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-metadata" containerID="cri-o://0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d" gracePeriod=30 Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.782897 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"31d9d741-6c48-4f7a-9594-d01fc29e21e5","Type":"ContainerStarted","Data":"f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.782937 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"31d9d741-6c48-4f7a-9594-d01fc29e21e5","Type":"ContainerStarted","Data":"91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.790327 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6fa815d8-d589-44d7-970d-571b675084e1","Type":"ContainerStarted","Data":"36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.797542 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerStarted","Data":"f5a31b3adfce575b595e70254dbb504c127aea85a28c34492d651a0886589d7b"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.810612 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.368129512 podStartE2EDuration="7.810587399s" podCreationTimestamp="2025-10-02 21:45:17 +0000 UTC" firstStartedPulling="2025-10-02 21:45:19.378008853 +0000 UTC m=+1310.701016872" lastFinishedPulling="2025-10-02 21:45:23.82046674 +0000 UTC m=+1315.143474759" observedRunningTime="2025-10-02 21:45:24.796101757 +0000 UTC m=+1316.119109776" watchObservedRunningTime="2025-10-02 21:45:24.810587399 +0000 UTC m=+1316.133595418" Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.816355 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"055cd6f8-9314-4513-b42e-9b309fb29469","Type":"ContainerStarted","Data":"670531a54c0e7191c5abfab45b0ff98f626f2901ba7fc008158a3e11cc6ccb04"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.816521 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="055cd6f8-9314-4513-b42e-9b309fb29469" containerName="nova-cell1-novncproxy-novncproxy" 
containerID="cri-o://670531a54c0e7191c5abfab45b0ff98f626f2901ba7fc008158a3e11cc6ccb04" gracePeriod=30 Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.825729 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"} Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.901942 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.788404743 podStartE2EDuration="7.901918144s" podCreationTimestamp="2025-10-02 21:45:17 +0000 UTC" firstStartedPulling="2025-10-02 21:45:18.70737361 +0000 UTC m=+1310.030381629" lastFinishedPulling="2025-10-02 21:45:23.820887011 +0000 UTC m=+1315.143895030" observedRunningTime="2025-10-02 21:45:24.814140842 +0000 UTC m=+1316.137148861" watchObservedRunningTime="2025-10-02 21:45:24.901918144 +0000 UTC m=+1316.224926163" Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.948254 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.488223245 podStartE2EDuration="7.948228844s" podCreationTimestamp="2025-10-02 21:45:17 +0000 UTC" firstStartedPulling="2025-10-02 21:45:19.361493838 +0000 UTC m=+1310.684501857" lastFinishedPulling="2025-10-02 21:45:23.821499437 +0000 UTC m=+1315.144507456" observedRunningTime="2025-10-02 21:45:24.845480828 +0000 UTC m=+1316.168488847" watchObservedRunningTime="2025-10-02 21:45:24.948228844 +0000 UTC m=+1316.271236863" Oct 02 21:45:24 crc kubenswrapper[4636]: I1002 21:45:24.985009 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.174735187 podStartE2EDuration="7.984992402s" podCreationTimestamp="2025-10-02 21:45:17 +0000 UTC" firstStartedPulling="2025-10-02 21:45:19.034795653 +0000 UTC m=+1310.357803672" lastFinishedPulling="2025-10-02 21:45:23.845052868 +0000 UTC m=+1315.168060887" observedRunningTime="2025-10-02 21:45:24.869323246 +0000 UTC m=+1316.192331265" watchObservedRunningTime="2025-10-02 21:45:24.984992402 +0000 UTC m=+1316.308000421" Oct 02 21:45:25 crc kubenswrapper[4636]: I1002 21:45:25.804044 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 02 21:45:25 crc kubenswrapper[4636]: I1002 21:45:25.835833 4636 generic.go:334] "Generic (PLEG): container finished" podID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerID="87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2" exitCode=143 Oct 02 21:45:25 crc kubenswrapper[4636]: I1002 21:45:25.835889 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerDied","Data":"87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2"} Oct 02 21:45:25 crc kubenswrapper[4636]: I1002 21:45:25.837984 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerStarted","Data":"e92dcfa6399d7043d9c0ce16bf21fc0985ea81b084b58efda8e3710843025f65"} Oct 02 21:45:26 crc kubenswrapper[4636]: I1002 21:45:26.856042 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerStarted","Data":"a56550ac6397281b00be251df4fcfa74ba96fe76a4506429eaf65e5d38b3959d"} Oct 02 21:45:27 crc kubenswrapper[4636]: I1002 21:45:27.869164 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerStarted","Data":"e2cd635d5616db67ebb9dc34cbd6403e0c029f6b4b51859033f990b05bd01f9e"} Oct 02 21:45:27 crc kubenswrapper[4636]: I1002 21:45:27.872196 4636 generic.go:334] "Generic (PLEG): container finished" podID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerID="8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8" exitCode=0 Oct 02 21:45:27 crc kubenswrapper[4636]: I1002 21:45:27.872258 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6897cb4484-tthsj" event={"ID":"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e","Type":"ContainerDied","Data":"8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8"} Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.141458 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.141524 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.195458 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.287851 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.288140 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.305953 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.306019 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.339884 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.347355 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.373248 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.423625 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-xkn2n"] Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.423851 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" containerName="dnsmasq-dns" containerID="cri-o://5d924c48a3e880df5961f2af92c3d0dff8ab6676a0bf2c49198c19d53a3aeb45" gracePeriod=10 Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.891489 4636 generic.go:334] "Generic (PLEG): 
container finished" podID="faafa825-e4c3-45ed-92f6-1798993353d0" containerID="5d924c48a3e880df5961f2af92c3d0dff8ab6676a0bf2c49198c19d53a3aeb45" exitCode=0 Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.892647 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" event={"ID":"faafa825-e4c3-45ed-92f6-1798993353d0","Type":"ContainerDied","Data":"5d924c48a3e880df5961f2af92c3d0dff8ab6676a0bf2c49198c19d53a3aeb45"} Oct 02 21:45:28 crc kubenswrapper[4636]: I1002 21:45:28.958037 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.226842 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.227054 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.259367 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.356517 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-swift-storage-0\") pod \"faafa825-e4c3-45ed-92f6-1798993353d0\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.356579 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-svc\") pod \"faafa825-e4c3-45ed-92f6-1798993353d0\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.356732 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpqpc\" (UniqueName: \"kubernetes.io/projected/faafa825-e4c3-45ed-92f6-1798993353d0-kube-api-access-hpqpc\") pod \"faafa825-e4c3-45ed-92f6-1798993353d0\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.356782 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-sb\") pod \"faafa825-e4c3-45ed-92f6-1798993353d0\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.356802 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-config\") pod \"faafa825-e4c3-45ed-92f6-1798993353d0\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.356859 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-nb\") pod \"faafa825-e4c3-45ed-92f6-1798993353d0\" (UID: \"faafa825-e4c3-45ed-92f6-1798993353d0\") " Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.401040 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faafa825-e4c3-45ed-92f6-1798993353d0-kube-api-access-hpqpc" (OuterVolumeSpecName: "kube-api-access-hpqpc") pod "faafa825-e4c3-45ed-92f6-1798993353d0" (UID: "faafa825-e4c3-45ed-92f6-1798993353d0"). InnerVolumeSpecName "kube-api-access-hpqpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.459405 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpqpc\" (UniqueName: \"kubernetes.io/projected/faafa825-e4c3-45ed-92f6-1798993353d0-kube-api-access-hpqpc\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.531123 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "faafa825-e4c3-45ed-92f6-1798993353d0" (UID: "faafa825-e4c3-45ed-92f6-1798993353d0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.555606 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "faafa825-e4c3-45ed-92f6-1798993353d0" (UID: "faafa825-e4c3-45ed-92f6-1798993353d0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.561820 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.561950 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.588201 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "faafa825-e4c3-45ed-92f6-1798993353d0" (UID: "faafa825-e4c3-45ed-92f6-1798993353d0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.593399 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-config" (OuterVolumeSpecName: "config") pod "faafa825-e4c3-45ed-92f6-1798993353d0" (UID: "faafa825-e4c3-45ed-92f6-1798993353d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.617041 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "faafa825-e4c3-45ed-92f6-1798993353d0" (UID: "faafa825-e4c3-45ed-92f6-1798993353d0"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.663399 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.663432 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.663441 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/faafa825-e4c3-45ed-92f6-1798993353d0-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.909724 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" event={"ID":"faafa825-e4c3-45ed-92f6-1798993353d0","Type":"ContainerDied","Data":"25dab710bbcc8edff4c5329682c82be95261822eda3829a6d7389c800db24d11"} Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.909849 4636 scope.go:117] "RemoveContainer" containerID="5d924c48a3e880df5961f2af92c3d0dff8ab6676a0bf2c49198c19d53a3aeb45" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.910121 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-xkn2n" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.923717 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerStarted","Data":"173c919ae9f78fcd2f64c671a8b49049e46edb1c5b992fdf85a5bd790cd4cf3d"} Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.923815 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.937800 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-xkn2n"] Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.947587 4636 scope.go:117] "RemoveContainer" containerID="9f94671a75f47c72a0c6f608da5e48835b8a34910ef2639bfa5cb84c3df401bc" Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.949967 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-xkn2n"] Oct 02 21:45:29 crc kubenswrapper[4636]: I1002 21:45:29.953125 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.91735811 podStartE2EDuration="8.953112655s" podCreationTimestamp="2025-10-02 21:45:21 +0000 UTC" firstStartedPulling="2025-10-02 21:45:24.295909043 +0000 UTC m=+1315.618917062" lastFinishedPulling="2025-10-02 21:45:28.331663578 +0000 UTC m=+1319.654671607" observedRunningTime="2025-10-02 21:45:29.947900668 +0000 UTC m=+1321.270908687" watchObservedRunningTime="2025-10-02 21:45:29.953112655 +0000 UTC m=+1321.276120674" Oct 02 21:45:31 crc kubenswrapper[4636]: I1002 21:45:31.614729 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" path="/var/lib/kubelet/pods/faafa825-e4c3-45ed-92f6-1798993353d0/volumes" Oct 02 21:45:31 crc kubenswrapper[4636]: I1002 21:45:31.982127 4636 generic.go:334] "Generic (PLEG): container finished" 
podID="3f81ffd6-ca6c-4848-bb80-576172c2f647" containerID="2907d819a6103e436640d6c422e993c2481d38378bf8e0961c8980e3edc96ddb" exitCode=0 Oct 02 21:45:31 crc kubenswrapper[4636]: I1002 21:45:31.982183 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" event={"ID":"3f81ffd6-ca6c-4848-bb80-576172c2f647","Type":"ContainerDied","Data":"2907d819a6103e436640d6c422e993c2481d38378bf8e0961c8980e3edc96ddb"} Oct 02 21:45:31 crc kubenswrapper[4636]: I1002 21:45:31.983620 4636 generic.go:334] "Generic (PLEG): container finished" podID="9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" containerID="aa4bceb21d8082f758a061347f1b63bf2aae4180a56de7fb8940ee1fd9aa2183" exitCode=0 Oct 02 21:45:31 crc kubenswrapper[4636]: I1002 21:45:31.983668 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bf96z" event={"ID":"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f","Type":"ContainerDied","Data":"aa4bceb21d8082f758a061347f1b63bf2aae4180a56de7fb8940ee1fd9aa2183"} Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.459827 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.473596 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.542718 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjw8g\" (UniqueName: \"kubernetes.io/projected/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-kube-api-access-xjw8g\") pod \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.542792 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-combined-ca-bundle\") pod \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.542840 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnlqv\" (UniqueName: \"kubernetes.io/projected/3f81ffd6-ca6c-4848-bb80-576172c2f647-kube-api-access-gnlqv\") pod \"3f81ffd6-ca6c-4848-bb80-576172c2f647\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.542881 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-scripts\") pod \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.542902 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-config-data\") pod \"3f81ffd6-ca6c-4848-bb80-576172c2f647\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.543800 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-combined-ca-bundle\") pod \"3f81ffd6-ca6c-4848-bb80-576172c2f647\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " Oct 02 
21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.543821 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-scripts\") pod \"3f81ffd6-ca6c-4848-bb80-576172c2f647\" (UID: \"3f81ffd6-ca6c-4848-bb80-576172c2f647\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.543867 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-config-data\") pod \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\" (UID: \"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f\") " Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.548882 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-kube-api-access-xjw8g" (OuterVolumeSpecName: "kube-api-access-xjw8g") pod "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" (UID: "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f"). InnerVolumeSpecName "kube-api-access-xjw8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.550524 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f81ffd6-ca6c-4848-bb80-576172c2f647-kube-api-access-gnlqv" (OuterVolumeSpecName: "kube-api-access-gnlqv") pod "3f81ffd6-ca6c-4848-bb80-576172c2f647" (UID: "3f81ffd6-ca6c-4848-bb80-576172c2f647"). InnerVolumeSpecName "kube-api-access-gnlqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.551684 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-scripts" (OuterVolumeSpecName: "scripts") pod "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" (UID: "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.554858 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-scripts" (OuterVolumeSpecName: "scripts") pod "3f81ffd6-ca6c-4848-bb80-576172c2f647" (UID: "3f81ffd6-ca6c-4848-bb80-576172c2f647"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.574153 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f81ffd6-ca6c-4848-bb80-576172c2f647" (UID: "3f81ffd6-ca6c-4848-bb80-576172c2f647"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.580465 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-config-data" (OuterVolumeSpecName: "config-data") pod "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" (UID: "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.580731 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" (UID: "9ea3d75f-eebd-4bb1-ae91-c460d4bac33f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.587676 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-config-data" (OuterVolumeSpecName: "config-data") pod "3f81ffd6-ca6c-4848-bb80-576172c2f647" (UID: "3f81ffd6-ca6c-4848-bb80-576172c2f647"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646173 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnlqv\" (UniqueName: \"kubernetes.io/projected/3f81ffd6-ca6c-4848-bb80-576172c2f647-kube-api-access-gnlqv\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646299 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646388 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646475 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646556 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f81ffd6-ca6c-4848-bb80-576172c2f647-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646632 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646800 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjw8g\" (UniqueName: \"kubernetes.io/projected/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-kube-api-access-xjw8g\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:33 crc kubenswrapper[4636]: I1002 21:45:33.646901 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.028511 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bf96z" event={"ID":"9ea3d75f-eebd-4bb1-ae91-c460d4bac33f","Type":"ContainerDied","Data":"8045029b1a318ae64b3f918c52c6f8d4678047ba17c0342e911efd04288a14d8"} Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.028572 4636 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="8045029b1a318ae64b3f918c52c6f8d4678047ba17c0342e911efd04288a14d8" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.028698 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bf96z" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.032553 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" event={"ID":"3f81ffd6-ca6c-4848-bb80-576172c2f647","Type":"ContainerDied","Data":"23c439d8be55ae6957e8d9897f7877b9de190a85332f75cf213cb424fadf7c81"} Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.032614 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23c439d8be55ae6957e8d9897f7877b9de190a85332f75cf213cb424fadf7c81" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.032725 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rgdjq" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.109885 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 02 21:45:34 crc kubenswrapper[4636]: E1002 21:45:34.110282 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" containerName="init" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110302 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" containerName="init" Oct 02 21:45:34 crc kubenswrapper[4636]: E1002 21:45:34.110328 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" containerName="nova-manage" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110336 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" containerName="nova-manage" Oct 02 21:45:34 crc kubenswrapper[4636]: E1002 21:45:34.110348 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f81ffd6-ca6c-4848-bb80-576172c2f647" containerName="nova-cell1-conductor-db-sync" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110356 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f81ffd6-ca6c-4848-bb80-576172c2f647" containerName="nova-cell1-conductor-db-sync" Oct 02 21:45:34 crc kubenswrapper[4636]: E1002 21:45:34.110385 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" containerName="dnsmasq-dns" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110393 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" containerName="dnsmasq-dns" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110609 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f81ffd6-ca6c-4848-bb80-576172c2f647" containerName="nova-cell1-conductor-db-sync" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110624 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" containerName="nova-manage" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.110641 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="faafa825-e4c3-45ed-92f6-1798993353d0" containerName="dnsmasq-dns" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.111257 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.113523 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.118484 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.212866 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.213155 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-log" containerID="cri-o://91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6" gracePeriod=30 Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.213657 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-api" containerID="cri-o://f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a" gracePeriod=30 Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.227192 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.227417 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="6fa815d8-d589-44d7-970d-571b675084e1" containerName="nova-scheduler-scheduler" containerID="cri-o://36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053" gracePeriod=30 Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.258369 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vblft\" (UniqueName: \"kubernetes.io/projected/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-kube-api-access-vblft\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.258635 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.258907 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.360260 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.361417 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.361520 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vblft\" (UniqueName: \"kubernetes.io/projected/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-kube-api-access-vblft\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.366613 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.368516 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.384198 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vblft\" (UniqueName: \"kubernetes.io/projected/6fd4eafc-387c-4297-9ab1-9f9c0666bf00-kube-api-access-vblft\") pod \"nova-cell1-conductor-0\" (UID: \"6fd4eafc-387c-4297-9ab1-9f9c0666bf00\") " pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.445360 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:34 crc kubenswrapper[4636]: I1002 21:45:34.948208 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 02 21:45:35 crc kubenswrapper[4636]: I1002 21:45:35.041674 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6fd4eafc-387c-4297-9ab1-9f9c0666bf00","Type":"ContainerStarted","Data":"dce292512a7372a02e135fccc68841be785f93dbe35040199ecfdeae3f0f5dfa"} Oct 02 21:45:35 crc kubenswrapper[4636]: I1002 21:45:35.043562 4636 generic.go:334] "Generic (PLEG): container finished" podID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerID="91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6" exitCode=143 Oct 02 21:45:35 crc kubenswrapper[4636]: I1002 21:45:35.043588 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"31d9d741-6c48-4f7a-9594-d01fc29e21e5","Type":"ContainerDied","Data":"91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6"} Oct 02 21:45:36 crc kubenswrapper[4636]: I1002 21:45:36.054080 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6fd4eafc-387c-4297-9ab1-9f9c0666bf00","Type":"ContainerStarted","Data":"63cc7479a70a2f8163aca1b4c1ceb721ce61830cc86904dd60473abe17ae7398"} Oct 02 21:45:36 crc kubenswrapper[4636]: I1002 21:45:36.054434 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:36 crc kubenswrapper[4636]: I1002 21:45:36.070991 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.07097002 podStartE2EDuration="2.07097002s" podCreationTimestamp="2025-10-02 21:45:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:36.068773482 +0000 UTC m=+1327.391781501" watchObservedRunningTime="2025-10-02 21:45:36.07097002 +0000 UTC m=+1327.393978039" Oct 02 21:45:36 crc kubenswrapper[4636]: I1002 21:45:36.941485 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.063330 4636 generic.go:334] "Generic (PLEG): container finished" podID="6fa815d8-d589-44d7-970d-571b675084e1" containerID="36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053" exitCode=0 Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.063396 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.063437 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6fa815d8-d589-44d7-970d-571b675084e1","Type":"ContainerDied","Data":"36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053"} Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.063466 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6fa815d8-d589-44d7-970d-571b675084e1","Type":"ContainerDied","Data":"d5457ab33f893069450effd2a6569dcd7bf4a4f82f41c9a1676282af2a66acbb"} Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.063481 4636 scope.go:117] "RemoveContainer" containerID="36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.088799 4636 scope.go:117] "RemoveContainer" containerID="36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053" Oct 02 21:45:37 crc kubenswrapper[4636]: E1002 21:45:37.089203 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053\": container with ID starting with 36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053 not found: ID does not exist" containerID="36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.089241 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053"} err="failed to get container status \"36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053\": rpc error: code = NotFound desc = could not find container \"36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053\": container with ID starting with 36bda9ef46d26fd6bf93a2dfbff323b99f37c36702788060eef8be1a97cbf053 not found: ID does not exist" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.110201 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-combined-ca-bundle\") pod \"6fa815d8-d589-44d7-970d-571b675084e1\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.110346 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vs7r\" (UniqueName: \"kubernetes.io/projected/6fa815d8-d589-44d7-970d-571b675084e1-kube-api-access-5vs7r\") pod \"6fa815d8-d589-44d7-970d-571b675084e1\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.110422 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-config-data\") pod \"6fa815d8-d589-44d7-970d-571b675084e1\" (UID: \"6fa815d8-d589-44d7-970d-571b675084e1\") " Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.127062 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fa815d8-d589-44d7-970d-571b675084e1-kube-api-access-5vs7r" (OuterVolumeSpecName: "kube-api-access-5vs7r") pod "6fa815d8-d589-44d7-970d-571b675084e1" (UID: "6fa815d8-d589-44d7-970d-571b675084e1"). InnerVolumeSpecName "kube-api-access-5vs7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.142830 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-config-data" (OuterVolumeSpecName: "config-data") pod "6fa815d8-d589-44d7-970d-571b675084e1" (UID: "6fa815d8-d589-44d7-970d-571b675084e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.155993 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fa815d8-d589-44d7-970d-571b675084e1" (UID: "6fa815d8-d589-44d7-970d-571b675084e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.213569 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.213980 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vs7r\" (UniqueName: \"kubernetes.io/projected/6fa815d8-d589-44d7-970d-571b675084e1-kube-api-access-5vs7r\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.214003 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa815d8-d589-44d7-970d-571b675084e1-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.393663 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.404104 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.419009 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:37 crc kubenswrapper[4636]: E1002 21:45:37.419443 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fa815d8-d589-44d7-970d-571b675084e1" containerName="nova-scheduler-scheduler" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.419463 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fa815d8-d589-44d7-970d-571b675084e1" containerName="nova-scheduler-scheduler" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.419690 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fa815d8-d589-44d7-970d-571b675084e1" containerName="nova-scheduler-scheduler" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.420399 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.429255 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.472228 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.519059 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zshv\" (UniqueName: \"kubernetes.io/projected/d7d77db9-6965-47b9-96a1-45c6f93bef5a-kube-api-access-4zshv\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.519135 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-config-data\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.519160 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.621321 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zshv\" (UniqueName: \"kubernetes.io/projected/d7d77db9-6965-47b9-96a1-45c6f93bef5a-kube-api-access-4zshv\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.621673 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-config-data\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.621696 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.628416 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-config-data\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.629465 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.647048 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zshv\" (UniqueName: 
\"kubernetes.io/projected/d7d77db9-6965-47b9-96a1-45c6f93bef5a-kube-api-access-4zshv\") pod \"nova-scheduler-0\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.656938 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fa815d8-d589-44d7-970d-571b675084e1" path="/var/lib/kubelet/pods/6fa815d8-d589-44d7-970d-571b675084e1/volumes" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.736330 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:45:37 crc kubenswrapper[4636]: I1002 21:45:37.873209 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.029094 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sfg5\" (UniqueName: \"kubernetes.io/projected/31d9d741-6c48-4f7a-9594-d01fc29e21e5-kube-api-access-2sfg5\") pod \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.029342 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-config-data\") pod \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.029396 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31d9d741-6c48-4f7a-9594-d01fc29e21e5-logs\") pod \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.029555 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-combined-ca-bundle\") pod \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\" (UID: \"31d9d741-6c48-4f7a-9594-d01fc29e21e5\") " Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.030326 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31d9d741-6c48-4f7a-9594-d01fc29e21e5-logs" (OuterVolumeSpecName: "logs") pod "31d9d741-6c48-4f7a-9594-d01fc29e21e5" (UID: "31d9d741-6c48-4f7a-9594-d01fc29e21e5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.033515 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d9d741-6c48-4f7a-9594-d01fc29e21e5-kube-api-access-2sfg5" (OuterVolumeSpecName: "kube-api-access-2sfg5") pod "31d9d741-6c48-4f7a-9594-d01fc29e21e5" (UID: "31d9d741-6c48-4f7a-9594-d01fc29e21e5"). InnerVolumeSpecName "kube-api-access-2sfg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.056063 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31d9d741-6c48-4f7a-9594-d01fc29e21e5" (UID: "31d9d741-6c48-4f7a-9594-d01fc29e21e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.063388 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-config-data" (OuterVolumeSpecName: "config-data") pod "31d9d741-6c48-4f7a-9594-d01fc29e21e5" (UID: "31d9d741-6c48-4f7a-9594-d01fc29e21e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.076162 4636 generic.go:334] "Generic (PLEG): container finished" podID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerID="f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a" exitCode=0 Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.076224 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.076243 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"31d9d741-6c48-4f7a-9594-d01fc29e21e5","Type":"ContainerDied","Data":"f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a"} Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.076299 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"31d9d741-6c48-4f7a-9594-d01fc29e21e5","Type":"ContainerDied","Data":"54155862ecaff3a842594c5c45b746d5dbe8436a61dbed97d863ef4d7ea0ce29"} Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.076318 4636 scope.go:117] "RemoveContainer" containerID="f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.107054 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.116482 4636 scope.go:117] "RemoveContainer" containerID="91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.121569 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.132112 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.132139 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sfg5\" (UniqueName: \"kubernetes.io/projected/31d9d741-6c48-4f7a-9594-d01fc29e21e5-kube-api-access-2sfg5\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.132149 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31d9d741-6c48-4f7a-9594-d01fc29e21e5-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.132158 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31d9d741-6c48-4f7a-9594-d01fc29e21e5-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.136050 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:38 crc kubenswrapper[4636]: E1002 21:45:38.136465 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-api" 
Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.136540 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-api" Oct 02 21:45:38 crc kubenswrapper[4636]: E1002 21:45:38.136628 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-log" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.136698 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-log" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.136949 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-log" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.137042 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" containerName="nova-api-api" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.138214 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.141074 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.153869 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.162060 4636 scope.go:117] "RemoveContainer" containerID="f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a" Oct 02 21:45:38 crc kubenswrapper[4636]: E1002 21:45:38.162597 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a\": container with ID starting with f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a not found: ID does not exist" containerID="f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.162625 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a"} err="failed to get container status \"f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a\": rpc error: code = NotFound desc = could not find container \"f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a\": container with ID starting with f3caa9ddececf03a6e855c3e53e64cdfbc7819a832a106a3e64e63a9471ea61a not found: ID does not exist" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.162644 4636 scope.go:117] "RemoveContainer" containerID="91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6" Oct 02 21:45:38 crc kubenswrapper[4636]: E1002 21:45:38.166077 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6\": container with ID starting with 91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6 not found: ID does not exist" containerID="91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.166101 4636 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6"} err="failed to get container status \"91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6\": rpc error: code = NotFound desc = could not find container \"91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6\": container with ID starting with 91305afc3d5330a6078f9c90330a715eb6676c9eefdf1ff67235b5e60acea5e6 not found: ID does not exist" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.196208 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:45:38 crc kubenswrapper[4636]: W1002 21:45:38.197472 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7d77db9_6965_47b9_96a1_45c6f93bef5a.slice/crio-de1b2b3e814dd740a6bc237a8303c08d200443e816238ab8cf0acd1979513f3f WatchSource:0}: Error finding container de1b2b3e814dd740a6bc237a8303c08d200443e816238ab8cf0acd1979513f3f: Status 404 returned error can't find the container with id de1b2b3e814dd740a6bc237a8303c08d200443e816238ab8cf0acd1979513f3f Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.234148 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64hsr\" (UniqueName: \"kubernetes.io/projected/d8ceb546-5162-481a-ae0c-538ede4764c5-kube-api-access-64hsr\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.234197 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-config-data\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.234231 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ceb546-5162-481a-ae0c-538ede4764c5-logs\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.234326 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.336427 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64hsr\" (UniqueName: \"kubernetes.io/projected/d8ceb546-5162-481a-ae0c-538ede4764c5-kube-api-access-64hsr\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.336485 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-config-data\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.336548 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d8ceb546-5162-481a-ae0c-538ede4764c5-logs\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.337228 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ceb546-5162-481a-ae0c-538ede4764c5-logs\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.338250 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.341135 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-config-data\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.344331 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.352991 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64hsr\" (UniqueName: \"kubernetes.io/projected/d8ceb546-5162-481a-ae0c-538ede4764c5-kube-api-access-64hsr\") pod \"nova-api-0\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.370630 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.470036 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:45:38 crc kubenswrapper[4636]: I1002 21:45:38.974637 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:45:39 crc kubenswrapper[4636]: I1002 21:45:39.094392 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d8ceb546-5162-481a-ae0c-538ede4764c5","Type":"ContainerStarted","Data":"a765f41f2cccda6333e745e0ce10dcd7eb657c9600fbe146244cb6ccab3db884"} Oct 02 21:45:39 crc kubenswrapper[4636]: I1002 21:45:39.099696 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d7d77db9-6965-47b9-96a1-45c6f93bef5a","Type":"ContainerStarted","Data":"002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa"} Oct 02 21:45:39 crc kubenswrapper[4636]: I1002 21:45:39.099716 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d7d77db9-6965-47b9-96a1-45c6f93bef5a","Type":"ContainerStarted","Data":"de1b2b3e814dd740a6bc237a8303c08d200443e816238ab8cf0acd1979513f3f"} Oct 02 21:45:39 crc kubenswrapper[4636]: I1002 21:45:39.120428 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.120411708 podStartE2EDuration="2.120411708s" podCreationTimestamp="2025-10-02 21:45:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:39.113879676 +0000 UTC m=+1330.436887695" watchObservedRunningTime="2025-10-02 21:45:39.120411708 +0000 UTC m=+1330.443419727" Oct 02 21:45:39 crc kubenswrapper[4636]: I1002 21:45:39.617807 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d9d741-6c48-4f7a-9594-d01fc29e21e5" path="/var/lib/kubelet/pods/31d9d741-6c48-4f7a-9594-d01fc29e21e5/volumes" Oct 02 21:45:40 crc kubenswrapper[4636]: I1002 21:45:40.112034 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d8ceb546-5162-481a-ae0c-538ede4764c5","Type":"ContainerStarted","Data":"107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5"} Oct 02 21:45:40 crc kubenswrapper[4636]: I1002 21:45:40.112077 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d8ceb546-5162-481a-ae0c-538ede4764c5","Type":"ContainerStarted","Data":"8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1"} Oct 02 21:45:40 crc kubenswrapper[4636]: I1002 21:45:40.148288 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.14826764 podStartE2EDuration="2.14826764s" podCreationTimestamp="2025-10-02 21:45:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:40.137656291 +0000 UTC m=+1331.460664350" watchObservedRunningTime="2025-10-02 21:45:40.14826764 +0000 UTC m=+1331.471275669" Oct 02 21:45:42 crc kubenswrapper[4636]: I1002 21:45:42.737008 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 02 21:45:44 crc kubenswrapper[4636]: I1002 21:45:44.477495 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 02 21:45:47 crc kubenswrapper[4636]: I1002 21:45:47.737013 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-scheduler-0" Oct 02 21:45:47 crc kubenswrapper[4636]: I1002 21:45:47.769303 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 02 21:45:48 crc kubenswrapper[4636]: I1002 21:45:48.217235 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 02 21:45:48 crc kubenswrapper[4636]: I1002 21:45:48.371649 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6897cb4484-tthsj" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 02 21:45:48 crc kubenswrapper[4636]: I1002 21:45:48.371733 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:45:48 crc kubenswrapper[4636]: I1002 21:45:48.470448 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 02 21:45:48 crc kubenswrapper[4636]: I1002 21:45:48.470793 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 02 21:45:49 crc kubenswrapper[4636]: I1002 21:45:49.552084 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:45:49 crc kubenswrapper[4636]: I1002 21:45:49.552130 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 21:45:51 crc kubenswrapper[4636]: I1002 21:45:51.663146 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.006261 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.150488 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181289 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-combined-ca-bundle\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181353 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbj65\" (UniqueName: \"kubernetes.io/projected/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-kube-api-access-xbj65\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181411 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-scripts\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181528 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-tls-certs\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181567 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-logs\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181598 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-secret-key\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.181675 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-config-data\") pod \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\" (UID: \"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.183492 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-logs" (OuterVolumeSpecName: "logs") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.202640 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-kube-api-access-xbj65" (OuterVolumeSpecName: "kube-api-access-xbj65") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "kube-api-access-xbj65". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.231917 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.270877 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.281463 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-scripts" (OuterVolumeSpecName: "scripts") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.281503 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-config-data" (OuterVolumeSpecName: "config-data") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.284628 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4962\" (UniqueName: \"kubernetes.io/projected/81cf4b6f-f5b4-4907-9f45-9f9de8460688-kube-api-access-w4962\") pod \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.285436 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-config-data\") pod \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.286480 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-combined-ca-bundle\") pod \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.286595 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81cf4b6f-f5b4-4907-9f45-9f9de8460688-logs\") pod \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\" (UID: \"81cf4b6f-f5b4-4907-9f45-9f9de8460688\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288410 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288429 4636 reconciler_common.go:293] 
"Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288439 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288448 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288458 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbj65\" (UniqueName: \"kubernetes.io/projected/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-kube-api-access-xbj65\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288468 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.288757 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81cf4b6f-f5b4-4907-9f45-9f9de8460688-logs" (OuterVolumeSpecName: "logs") pod "81cf4b6f-f5b4-4907-9f45-9f9de8460688" (UID: "81cf4b6f-f5b4-4907-9f45-9f9de8460688"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.294726 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81cf4b6f-f5b4-4907-9f45-9f9de8460688-kube-api-access-w4962" (OuterVolumeSpecName: "kube-api-access-w4962") pod "81cf4b6f-f5b4-4907-9f45-9f9de8460688" (UID: "81cf4b6f-f5b4-4907-9f45-9f9de8460688"). InnerVolumeSpecName "kube-api-access-w4962". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.305943 4636 generic.go:334] "Generic (PLEG): container finished" podID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerID="bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2" exitCode=137 Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.306020 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6897cb4484-tthsj" event={"ID":"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e","Type":"ContainerDied","Data":"bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2"} Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.306046 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6897cb4484-tthsj" event={"ID":"14b985b5-ee01-4fcc-9225-c1c61ddd9d4e","Type":"ContainerDied","Data":"8eb6df51d904d96784106541fc2fad20b7607f533043d073cfe2449f700d4b5b"} Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.306061 4636 scope.go:117] "RemoveContainer" containerID="8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.306228 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6897cb4484-tthsj" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.311769 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.314198 4636 generic.go:334] "Generic (PLEG): container finished" podID="055cd6f8-9314-4513-b42e-9b309fb29469" containerID="670531a54c0e7191c5abfab45b0ff98f626f2901ba7fc008158a3e11cc6ccb04" exitCode=137 Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.314275 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"055cd6f8-9314-4513-b42e-9b309fb29469","Type":"ContainerDied","Data":"670531a54c0e7191c5abfab45b0ff98f626f2901ba7fc008158a3e11cc6ccb04"} Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.327885 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" (UID: "14b985b5-ee01-4fcc-9225-c1c61ddd9d4e"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.347532 4636 generic.go:334] "Generic (PLEG): container finished" podID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerID="0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d" exitCode=137 Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.347573 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerDied","Data":"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d"} Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.347598 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81cf4b6f-f5b4-4907-9f45-9f9de8460688","Type":"ContainerDied","Data":"6e46252ff5fee7d515b97cdcbf6202a4e4c8401f92d65f823af597a4c7a452aa"} Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.347655 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.365839 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81cf4b6f-f5b4-4907-9f45-9f9de8460688" (UID: "81cf4b6f-f5b4-4907-9f45-9f9de8460688"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.392869 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-config-data\") pod \"055cd6f8-9314-4513-b42e-9b309fb29469\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.392925 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-combined-ca-bundle\") pod \"055cd6f8-9314-4513-b42e-9b309fb29469\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.392956 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48j2c\" (UniqueName: \"kubernetes.io/projected/055cd6f8-9314-4513-b42e-9b309fb29469-kube-api-access-48j2c\") pod \"055cd6f8-9314-4513-b42e-9b309fb29469\" (UID: \"055cd6f8-9314-4513-b42e-9b309fb29469\") " Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.393350 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81cf4b6f-f5b4-4907-9f45-9f9de8460688-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.393363 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4962\" (UniqueName: \"kubernetes.io/projected/81cf4b6f-f5b4-4907-9f45-9f9de8460688-kube-api-access-w4962\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.393373 4636 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.393382 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.400001 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-config-data" (OuterVolumeSpecName: "config-data") pod "81cf4b6f-f5b4-4907-9f45-9f9de8460688" (UID: "81cf4b6f-f5b4-4907-9f45-9f9de8460688"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.417080 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/055cd6f8-9314-4513-b42e-9b309fb29469-kube-api-access-48j2c" (OuterVolumeSpecName: "kube-api-access-48j2c") pod "055cd6f8-9314-4513-b42e-9b309fb29469" (UID: "055cd6f8-9314-4513-b42e-9b309fb29469"). InnerVolumeSpecName "kube-api-access-48j2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.471913 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "055cd6f8-9314-4513-b42e-9b309fb29469" (UID: "055cd6f8-9314-4513-b42e-9b309fb29469"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.478629 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-config-data" (OuterVolumeSpecName: "config-data") pod "055cd6f8-9314-4513-b42e-9b309fb29469" (UID: "055cd6f8-9314-4513-b42e-9b309fb29469"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.495543 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81cf4b6f-f5b4-4907-9f45-9f9de8460688-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.495597 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.495612 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/055cd6f8-9314-4513-b42e-9b309fb29469-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.495625 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48j2c\" (UniqueName: \"kubernetes.io/projected/055cd6f8-9314-4513-b42e-9b309fb29469-kube-api-access-48j2c\") on node \"crc\" DevicePath \"\"" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.579455 4636 scope.go:117] "RemoveContainer" containerID="bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.652370 4636 scope.go:117] "RemoveContainer" containerID="8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.653016 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8\": container with ID starting with 8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8 not found: ID does not exist" containerID="8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.653062 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8"} err="failed to get container status \"8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8\": rpc error: code = NotFound desc = could not find container \"8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8\": container with ID starting with 8abc0d4801d0cd9f7a314afdc8eb3f5434907e4a837093dc2abd64d927d040d8 not found: ID does not exist" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.653083 4636 scope.go:117] "RemoveContainer" containerID="bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.653394 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2\": container with ID starting with bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2 not found: ID does not exist" 
containerID="bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.653448 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2"} err="failed to get container status \"bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2\": rpc error: code = NotFound desc = could not find container \"bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2\": container with ID starting with bc3d0a5acdb489088a69c7f65354d4a22ae56f5a36dbda305713a8da815060a2 not found: ID does not exist" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.653472 4636 scope.go:117] "RemoveContainer" containerID="670531a54c0e7191c5abfab45b0ff98f626f2901ba7fc008158a3e11cc6ccb04" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.664814 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6897cb4484-tthsj"] Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.676207 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6897cb4484-tthsj"] Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.680368 4636 scope.go:117] "RemoveContainer" containerID="0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.686904 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.709043 4636 scope.go:117] "RemoveContainer" containerID="87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.757685 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.773490 4636 scope.go:117] "RemoveContainer" containerID="0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.774970 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d\": container with ID starting with 0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d not found: ID does not exist" containerID="0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.775005 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d"} err="failed to get container status \"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d\": rpc error: code = NotFound desc = could not find container \"0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d\": container with ID starting with 0e53ee31ce6d2e9496d60d17eb3755e02d34cc383c15ef71c4d0e9a66b46af5d not found: ID does not exist" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.775027 4636 scope.go:117] "RemoveContainer" containerID="87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.777691 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2\": container with ID starting with 
87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2 not found: ID does not exist" containerID="87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.777743 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2"} err="failed to get container status \"87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2\": rpc error: code = NotFound desc = could not find container \"87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2\": container with ID starting with 87acdd2e9cec415831aad063417ee5a23509926845a04f98e5f2db3aefe120b2 not found: ID does not exist" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.778110 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.781101 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon-log" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781144 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon-log" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.781197 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055cd6f8-9314-4513-b42e-9b309fb29469" containerName="nova-cell1-novncproxy-novncproxy" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781205 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="055cd6f8-9314-4513-b42e-9b309fb29469" containerName="nova-cell1-novncproxy-novncproxy" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.781224 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-log" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781230 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-log" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.781247 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-metadata" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781255 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-metadata" Oct 02 21:45:55 crc kubenswrapper[4636]: E1002 21:45:55.781266 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781272 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781581 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-metadata" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781600 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="055cd6f8-9314-4513-b42e-9b309fb29469" containerName="nova-cell1-novncproxy-novncproxy" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781610 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" 
containerName="horizon-log" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781623 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" containerName="nova-metadata-log" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.781633 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" containerName="horizon" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.783011 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.788659 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.788967 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.789195 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.917547 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.917773 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.917804 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-config-data\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.917821 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx9jh\" (UniqueName: \"kubernetes.io/projected/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-kube-api-access-xx9jh\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:55 crc kubenswrapper[4636]: I1002 21:45:55.917844 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-logs\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.020135 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.020830 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-config-data\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.020857 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx9jh\" (UniqueName: \"kubernetes.io/projected/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-kube-api-access-xx9jh\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.020885 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-logs\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.021010 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.021804 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-logs\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.024504 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.024721 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-config-data\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.026225 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.041276 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx9jh\" (UniqueName: \"kubernetes.io/projected/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-kube-api-access-xx9jh\") pod \"nova-metadata-0\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.105196 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.359203 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"055cd6f8-9314-4513-b42e-9b309fb29469","Type":"ContainerDied","Data":"6310f7591ea3dee9ee7e9ae356723713acf585f129b9631e09a32d0c27c45ce6"} Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.359254 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.385461 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.396041 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.412782 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.414012 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.420854 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.421447 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.421669 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.430970 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.535568 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.535895 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.536034 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.536155 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: 
I1002 21:45:56.536221 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n8jn\" (UniqueName: \"kubernetes.io/projected/912c7cd9-fffe-40eb-a35c-4a05d26a3374-kube-api-access-7n8jn\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.613901 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.637352 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.637396 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.637446 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.637471 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.638276 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n8jn\" (UniqueName: \"kubernetes.io/projected/912c7cd9-fffe-40eb-a35c-4a05d26a3374-kube-api-access-7n8jn\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.644526 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.652071 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.652625 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.658862 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/912c7cd9-fffe-40eb-a35c-4a05d26a3374-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.660454 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n8jn\" (UniqueName: \"kubernetes.io/projected/912c7cd9-fffe-40eb-a35c-4a05d26a3374-kube-api-access-7n8jn\") pod \"nova-cell1-novncproxy-0\" (UID: \"912c7cd9-fffe-40eb-a35c-4a05d26a3374\") " pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:56 crc kubenswrapper[4636]: I1002 21:45:56.738881 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.196696 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 02 21:45:57 crc kubenswrapper[4636]: W1002 21:45:57.207249 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod912c7cd9_fffe_40eb_a35c_4a05d26a3374.slice/crio-4d7c5a4a8080177348a242850d29408cd2895b1e390944a7a1db507d4e5d5c67 WatchSource:0}: Error finding container 4d7c5a4a8080177348a242850d29408cd2895b1e390944a7a1db507d4e5d5c67: Status 404 returned error can't find the container with id 4d7c5a4a8080177348a242850d29408cd2895b1e390944a7a1db507d4e5d5c67 Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.379563 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"912c7cd9-fffe-40eb-a35c-4a05d26a3374","Type":"ContainerStarted","Data":"4d7c5a4a8080177348a242850d29408cd2895b1e390944a7a1db507d4e5d5c67"} Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.381390 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508","Type":"ContainerStarted","Data":"6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d"} Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.381415 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508","Type":"ContainerStarted","Data":"edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522"} Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.381425 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508","Type":"ContainerStarted","Data":"6e005e529a59da171615c1ed3be976b4194fef55c0056243a204ec38243dcded"} Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.405139 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.405121419 podStartE2EDuration="2.405121419s" podCreationTimestamp="2025-10-02 21:45:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:57.397418116 +0000 UTC m=+1348.720426135" watchObservedRunningTime="2025-10-02 21:45:57.405121419 +0000 UTC m=+1348.728129438" Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.623660 4636 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="055cd6f8-9314-4513-b42e-9b309fb29469" path="/var/lib/kubelet/pods/055cd6f8-9314-4513-b42e-9b309fb29469/volumes" Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.624338 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14b985b5-ee01-4fcc-9225-c1c61ddd9d4e" path="/var/lib/kubelet/pods/14b985b5-ee01-4fcc-9225-c1c61ddd9d4e/volumes" Oct 02 21:45:57 crc kubenswrapper[4636]: I1002 21:45:57.625012 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81cf4b6f-f5b4-4907-9f45-9f9de8460688" path="/var/lib/kubelet/pods/81cf4b6f-f5b4-4907-9f45-9f9de8460688/volumes" Oct 02 21:45:58 crc kubenswrapper[4636]: I1002 21:45:58.398903 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"912c7cd9-fffe-40eb-a35c-4a05d26a3374","Type":"ContainerStarted","Data":"4b1c43228e421053a085cad2604a9c0825f9c02402a35ccc274b8369e39de458"} Oct 02 21:45:58 crc kubenswrapper[4636]: I1002 21:45:58.415936 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.415918312 podStartE2EDuration="2.415918312s" podCreationTimestamp="2025-10-02 21:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:45:58.415506691 +0000 UTC m=+1349.738514710" watchObservedRunningTime="2025-10-02 21:45:58.415918312 +0000 UTC m=+1349.738926331" Oct 02 21:45:58 crc kubenswrapper[4636]: I1002 21:45:58.482958 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 02 21:45:58 crc kubenswrapper[4636]: I1002 21:45:58.483555 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 02 21:45:58 crc kubenswrapper[4636]: I1002 21:45:58.483868 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 02 21:45:58 crc kubenswrapper[4636]: I1002 21:45:58.488537 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.404558 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.407606 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.619722 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9khnk"] Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.621133 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9khnk"] Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.621214 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.723005 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.723356 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnjbk\" (UniqueName: \"kubernetes.io/projected/fc3ee6cf-52bc-4586-b226-c40a74952c04-kube-api-access-fnjbk\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.723416 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.723439 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.723473 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-config\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.723487 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.825534 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.825586 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.825620 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-config\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" 
(UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.825638 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.825689 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.825741 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnjbk\" (UniqueName: \"kubernetes.io/projected/fc3ee6cf-52bc-4586-b226-c40a74952c04-kube-api-access-fnjbk\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.826895 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.827590 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.827736 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.827855 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-config\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.828367 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.848652 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnjbk\" (UniqueName: \"kubernetes.io/projected/fc3ee6cf-52bc-4586-b226-c40a74952c04-kube-api-access-fnjbk\") pod \"dnsmasq-dns-59cf4bdb65-9khnk\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " 
pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:45:59 crc kubenswrapper[4636]: I1002 21:45:59.949887 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:46:00 crc kubenswrapper[4636]: I1002 21:46:00.387986 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9khnk"] Oct 02 21:46:00 crc kubenswrapper[4636]: I1002 21:46:00.415697 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" event={"ID":"fc3ee6cf-52bc-4586-b226-c40a74952c04","Type":"ContainerStarted","Data":"5c63806479055899f66f62c86c1d0bdef2933cb283b0a5e494b3f8e0dc83e36b"} Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.105531 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.105814 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.424092 4636 generic.go:334] "Generic (PLEG): container finished" podID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerID="3e1cd06718fdecdaaddca6978d042f225d4e01b4325cdf818af2094c88197fcb" exitCode=0 Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.424160 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" event={"ID":"fc3ee6cf-52bc-4586-b226-c40a74952c04","Type":"ContainerDied","Data":"3e1cd06718fdecdaaddca6978d042f225d4e01b4325cdf818af2094c88197fcb"} Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.654695 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.657173 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-notification-agent" containerID="cri-o://a56550ac6397281b00be251df4fcfa74ba96fe76a4506429eaf65e5d38b3959d" gracePeriod=30 Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.657183 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="sg-core" containerID="cri-o://e2cd635d5616db67ebb9dc34cbd6403e0c029f6b4b51859033f990b05bd01f9e" gracePeriod=30 Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.657377 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="proxy-httpd" containerID="cri-o://173c919ae9f78fcd2f64c671a8b49049e46edb1c5b992fdf85a5bd790cd4cf3d" gracePeriod=30 Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.657501 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-central-agent" containerID="cri-o://e92dcfa6399d7043d9c0ce16bf21fc0985ea81b084b58efda8e3710843025f65" gracePeriod=30 Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.739343 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:46:01 crc kubenswrapper[4636]: I1002 21:46:01.769027 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.434463 4636 generic.go:334] 
"Generic (PLEG): container finished" podID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerID="173c919ae9f78fcd2f64c671a8b49049e46edb1c5b992fdf85a5bd790cd4cf3d" exitCode=0 Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.434699 4636 generic.go:334] "Generic (PLEG): container finished" podID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerID="e2cd635d5616db67ebb9dc34cbd6403e0c029f6b4b51859033f990b05bd01f9e" exitCode=2 Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.434538 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerDied","Data":"173c919ae9f78fcd2f64c671a8b49049e46edb1c5b992fdf85a5bd790cd4cf3d"} Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.434742 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerDied","Data":"e2cd635d5616db67ebb9dc34cbd6403e0c029f6b4b51859033f990b05bd01f9e"} Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.434788 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerDied","Data":"e92dcfa6399d7043d9c0ce16bf21fc0985ea81b084b58efda8e3710843025f65"} Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.434709 4636 generic.go:334] "Generic (PLEG): container finished" podID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerID="e92dcfa6399d7043d9c0ce16bf21fc0985ea81b084b58efda8e3710843025f65" exitCode=0 Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.436594 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" event={"ID":"fc3ee6cf-52bc-4586-b226-c40a74952c04","Type":"ContainerStarted","Data":"0bea228f50f6510275aedb8d338944230df07f1579325930e830d9debb39fb4f"} Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.436668 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-log" containerID="cri-o://8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1" gracePeriod=30 Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.436831 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-api" containerID="cri-o://107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5" gracePeriod=30 Oct 02 21:46:02 crc kubenswrapper[4636]: I1002 21:46:02.467886 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" podStartSLOduration=3.467867875 podStartE2EDuration="3.467867875s" podCreationTimestamp="2025-10-02 21:45:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:46:02.461555308 +0000 UTC m=+1353.784563327" watchObservedRunningTime="2025-10-02 21:46:02.467867875 +0000 UTC m=+1353.790875894" Oct 02 21:46:03 crc kubenswrapper[4636]: I1002 21:46:03.447537 4636 generic.go:334] "Generic (PLEG): container finished" podID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerID="8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1" exitCode=143 Oct 02 21:46:03 crc kubenswrapper[4636]: I1002 21:46:03.447619 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"d8ceb546-5162-481a-ae0c-538ede4764c5","Type":"ContainerDied","Data":"8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1"} Oct 02 21:46:03 crc kubenswrapper[4636]: I1002 21:46:03.448243 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.465795 4636 generic.go:334] "Generic (PLEG): container finished" podID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerID="a56550ac6397281b00be251df4fcfa74ba96fe76a4506429eaf65e5d38b3959d" exitCode=0 Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.466094 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerDied","Data":"a56550ac6397281b00be251df4fcfa74ba96fe76a4506429eaf65e5d38b3959d"} Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.466169 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e","Type":"ContainerDied","Data":"f5a31b3adfce575b595e70254dbb504c127aea85a28c34492d651a0886589d7b"} Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.466181 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5a31b3adfce575b595e70254dbb504c127aea85a28c34492d651a0886589d7b" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.509321 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643073 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-run-httpd\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643397 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-config-data\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643472 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-combined-ca-bundle\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-log-httpd\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643661 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-scripts\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643797 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4t8j\" (UniqueName: 
\"kubernetes.io/projected/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-kube-api-access-v4t8j\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643881 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-sg-core-conf-yaml\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643573 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.643972 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-ceilometer-tls-certs\") pod \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\" (UID: \"955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e\") " Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.644910 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.645479 4636 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.645506 4636 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.648989 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-scripts" (OuterVolumeSpecName: "scripts") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.650300 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-kube-api-access-v4t8j" (OuterVolumeSpecName: "kube-api-access-v4t8j") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "kube-api-access-v4t8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.673849 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.697839 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.727629 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.747577 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.747609 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.747619 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4t8j\" (UniqueName: \"kubernetes.io/projected/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-kube-api-access-v4t8j\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.747629 4636 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.747638 4636 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.747767 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-config-data" (OuterVolumeSpecName: "config-data") pod "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" (UID: "955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:04 crc kubenswrapper[4636]: I1002 21:46:04.849642 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.474256 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.507845 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.520361 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.562423 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:46:05 crc kubenswrapper[4636]: E1002 21:46:05.563075 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-central-agent" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563105 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-central-agent" Oct 02 21:46:05 crc kubenswrapper[4636]: E1002 21:46:05.563119 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-notification-agent" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563128 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-notification-agent" Oct 02 21:46:05 crc kubenswrapper[4636]: E1002 21:46:05.563145 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="proxy-httpd" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563153 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="proxy-httpd" Oct 02 21:46:05 crc kubenswrapper[4636]: E1002 21:46:05.563180 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="sg-core" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563189 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="sg-core" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563419 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-central-agent" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563445 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="proxy-httpd" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563466 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="ceilometer-notification-agent" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.563485 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" containerName="sg-core" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.565677 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.568021 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.568422 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.569504 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.572267 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.614442 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e" path="/var/lib/kubelet/pods/955fa4b9-d09e-4b76-b8a5-1b79b55cdb9e/volumes" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.765968 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54mfg\" (UniqueName: \"kubernetes.io/projected/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-kube-api-access-54mfg\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766245 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-config-data\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766339 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766445 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-scripts\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766558 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-log-httpd\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766694 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-run-httpd\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766805 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " 
pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.766826 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.869497 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54mfg\" (UniqueName: \"kubernetes.io/projected/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-kube-api-access-54mfg\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.869876 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-config-data\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.869932 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.869982 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-scripts\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.870048 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-log-httpd\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.870122 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-run-httpd\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.870237 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.870259 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.871735 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-run-httpd\") pod \"ceilometer-0\" (UID: 
\"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.872020 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-log-httpd\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.876776 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.877058 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.878204 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-config-data\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.879098 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-scripts\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.887224 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.893012 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54mfg\" (UniqueName: \"kubernetes.io/projected/3d58ed15-a4f4-4cb0-8d57-95f5a863bca3-kube-api-access-54mfg\") pod \"ceilometer-0\" (UID: \"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3\") " pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.895354 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 02 21:46:05 crc kubenswrapper[4636]: I1002 21:46:05.999016 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.106366 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.106695 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.173381 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-combined-ca-bundle\") pod \"d8ceb546-5162-481a-ae0c-538ede4764c5\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.173422 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-config-data\") pod \"d8ceb546-5162-481a-ae0c-538ede4764c5\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.173591 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64hsr\" (UniqueName: \"kubernetes.io/projected/d8ceb546-5162-481a-ae0c-538ede4764c5-kube-api-access-64hsr\") pod \"d8ceb546-5162-481a-ae0c-538ede4764c5\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.173673 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ceb546-5162-481a-ae0c-538ede4764c5-logs\") pod \"d8ceb546-5162-481a-ae0c-538ede4764c5\" (UID: \"d8ceb546-5162-481a-ae0c-538ede4764c5\") " Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.174667 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ceb546-5162-481a-ae0c-538ede4764c5-logs" (OuterVolumeSpecName: "logs") pod "d8ceb546-5162-481a-ae0c-538ede4764c5" (UID: "d8ceb546-5162-481a-ae0c-538ede4764c5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.178355 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ceb546-5162-481a-ae0c-538ede4764c5-kube-api-access-64hsr" (OuterVolumeSpecName: "kube-api-access-64hsr") pod "d8ceb546-5162-481a-ae0c-538ede4764c5" (UID: "d8ceb546-5162-481a-ae0c-538ede4764c5"). InnerVolumeSpecName "kube-api-access-64hsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.203363 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8ceb546-5162-481a-ae0c-538ede4764c5" (UID: "d8ceb546-5162-481a-ae0c-538ede4764c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.235700 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-config-data" (OuterVolumeSpecName: "config-data") pod "d8ceb546-5162-481a-ae0c-538ede4764c5" (UID: "d8ceb546-5162-481a-ae0c-538ede4764c5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.277034 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.277289 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ceb546-5162-481a-ae0c-538ede4764c5-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.277361 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64hsr\" (UniqueName: \"kubernetes.io/projected/d8ceb546-5162-481a-ae0c-538ede4764c5-kube-api-access-64hsr\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.277426 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ceb546-5162-481a-ae0c-538ede4764c5-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.471725 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.482211 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.493574 4636 generic.go:334] "Generic (PLEG): container finished" podID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerID="107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5" exitCode=0 Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.493617 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d8ceb546-5162-481a-ae0c-538ede4764c5","Type":"ContainerDied","Data":"107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5"} Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.493651 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d8ceb546-5162-481a-ae0c-538ede4764c5","Type":"ContainerDied","Data":"a765f41f2cccda6333e745e0ce10dcd7eb657c9600fbe146244cb6ccab3db884"} Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.493669 4636 scope.go:117] "RemoveContainer" containerID="107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.493807 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.555439 4636 scope.go:117] "RemoveContainer" containerID="8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.558733 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.570885 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.595101 4636 scope.go:117] "RemoveContainer" containerID="107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5" Oct 02 21:46:06 crc kubenswrapper[4636]: E1002 21:46:06.595399 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5\": container with ID starting with 107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5 not found: ID does not exist" containerID="107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.595425 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5"} err="failed to get container status \"107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5\": rpc error: code = NotFound desc = could not find container \"107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5\": container with ID starting with 107eb0a409ef075945647923852aede2e5dd65f1d5cce8dab7ef6654e7fe57d5 not found: ID does not exist" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.595445 4636 scope.go:117] "RemoveContainer" containerID="8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1" Oct 02 21:46:06 crc kubenswrapper[4636]: E1002 21:46:06.595721 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1\": container with ID starting with 8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1 not found: ID does not exist" containerID="8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.595779 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1"} err="failed to get container status \"8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1\": rpc error: code = NotFound desc = could not find container \"8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1\": container with ID starting with 8e0ec37f88275eb4266c68936bd3b838b380a2cec150808ab3c4138af36481b1 not found: ID does not exist" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.620785 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:06 crc kubenswrapper[4636]: E1002 21:46:06.621181 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-api" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.621200 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-api" Oct 02 21:46:06 crc 
kubenswrapper[4636]: E1002 21:46:06.621223 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-log" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.621229 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-log" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.621398 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-log" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.621415 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" containerName="nova-api-api" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.622333 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.624239 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.624494 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.627953 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.629210 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.692671 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.692713 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-config-data\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.692741 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zwhn\" (UniqueName: \"kubernetes.io/projected/c3b3db60-4199-4c3a-9315-6d03316f47cd-kube-api-access-4zwhn\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.693394 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.693459 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3b3db60-4199-4c3a-9315-6d03316f47cd-logs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.693494 4636 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-public-tls-certs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.739997 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.766278 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.794722 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.794779 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-config-data\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.794804 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zwhn\" (UniqueName: \"kubernetes.io/projected/c3b3db60-4199-4c3a-9315-6d03316f47cd-kube-api-access-4zwhn\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.794894 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.794914 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3b3db60-4199-4c3a-9315-6d03316f47cd-logs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.794929 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-public-tls-certs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.796353 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3b3db60-4199-4c3a-9315-6d03316f47cd-logs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.799971 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.807334 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.811773 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-config-data\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.817430 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zwhn\" (UniqueName: \"kubernetes.io/projected/c3b3db60-4199-4c3a-9315-6d03316f47cd-kube-api-access-4zwhn\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.826622 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-public-tls-certs\") pod \"nova-api-0\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " pod="openstack/nova-api-0" Oct 02 21:46:06 crc kubenswrapper[4636]: I1002 21:46:06.949596 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.134984 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.135562 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.395427 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.510526 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3","Type":"ContainerStarted","Data":"396c7b309c5a5bceae0f75715199a43baf87e1c83529d05d3ba84b1ff3d8369e"} Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.510570 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3","Type":"ContainerStarted","Data":"4cec18a6d79e50e6337ae5392c6f37ec6d8fa985c2c08e85dba5bcd686a79f80"} Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.512473 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c3b3db60-4199-4c3a-9315-6d03316f47cd","Type":"ContainerStarted","Data":"e9dc9c3a86474afc70fcef588492d36333c3db8a0d5582e41e0ba7b050ee7276"} Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.538236 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.622064 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="d8ceb546-5162-481a-ae0c-538ede4764c5" path="/var/lib/kubelet/pods/d8ceb546-5162-481a-ae0c-538ede4764c5/volumes" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.952897 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-b82lk"] Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.954307 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.965176 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.965714 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 02 21:46:07 crc kubenswrapper[4636]: I1002 21:46:07.972935 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-b82lk"] Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.023958 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-scripts\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.024026 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-config-data\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.024080 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxxj7\" (UniqueName: \"kubernetes.io/projected/4d28e7d5-138e-4b42-bc0a-d118cca25785-kube-api-access-zxxj7\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.024101 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.127903 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-scripts\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.127983 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-config-data\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.128038 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxxj7\" (UniqueName: 
\"kubernetes.io/projected/4d28e7d5-138e-4b42-bc0a-d118cca25785-kube-api-access-zxxj7\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.128067 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.137663 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-config-data\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.138164 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-scripts\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.167371 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.189132 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxxj7\" (UniqueName: \"kubernetes.io/projected/4d28e7d5-138e-4b42-bc0a-d118cca25785-kube-api-access-zxxj7\") pod \"nova-cell1-cell-mapping-b82lk\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.439294 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.537083 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3","Type":"ContainerStarted","Data":"63ff9e1c24abf0861f2a18b08d7123a0c752779450aa2b7bcaac54e7ac6b1833"} Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.541375 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c3b3db60-4199-4c3a-9315-6d03316f47cd","Type":"ContainerStarted","Data":"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e"} Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.541424 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c3b3db60-4199-4c3a-9315-6d03316f47cd","Type":"ContainerStarted","Data":"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba"} Oct 02 21:46:08 crc kubenswrapper[4636]: I1002 21:46:08.560293 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.560276381 podStartE2EDuration="2.560276381s" podCreationTimestamp="2025-10-02 21:46:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:46:08.557362814 +0000 UTC m=+1359.880370833" watchObservedRunningTime="2025-10-02 21:46:08.560276381 +0000 UTC m=+1359.883284400" Oct 02 21:46:09 crc kubenswrapper[4636]: I1002 21:46:09.010386 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-b82lk"] Oct 02 21:46:09 crc kubenswrapper[4636]: I1002 21:46:09.552165 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3","Type":"ContainerStarted","Data":"7919fc877f5d2d697bd0d95caf612835deef8cc1cebeeb2b564d7308303f063b"} Oct 02 21:46:09 crc kubenswrapper[4636]: I1002 21:46:09.554130 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b82lk" event={"ID":"4d28e7d5-138e-4b42-bc0a-d118cca25785","Type":"ContainerStarted","Data":"4ac9f7567672b639710bb86a4936c3ee21159d9efc95cbc3c25626988f3ebae2"} Oct 02 21:46:09 crc kubenswrapper[4636]: I1002 21:46:09.554165 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b82lk" event={"ID":"4d28e7d5-138e-4b42-bc0a-d118cca25785","Type":"ContainerStarted","Data":"1c28b8404d602703519b74235fb6da065970ec11e58d6372dc0ef08cc24eaecd"} Oct 02 21:46:09 crc kubenswrapper[4636]: I1002 21:46:09.576385 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-b82lk" podStartSLOduration=2.5763687539999998 podStartE2EDuration="2.576368754s" podCreationTimestamp="2025-10-02 21:46:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:46:09.567895512 +0000 UTC m=+1360.890903531" watchObservedRunningTime="2025-10-02 21:46:09.576368754 +0000 UTC m=+1360.899376773" Oct 02 21:46:09 crc kubenswrapper[4636]: I1002 21:46:09.951870 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.032109 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-jq4n9"] Oct 02 21:46:10 
crc kubenswrapper[4636]: I1002 21:46:10.032349 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerName="dnsmasq-dns" containerID="cri-o://9f2bdc2dac0842e30cb4b34728f2a5fdd78cd49c99a261a805013e9309e2142d" gracePeriod=10 Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.584960 4636 generic.go:334] "Generic (PLEG): container finished" podID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerID="9f2bdc2dac0842e30cb4b34728f2a5fdd78cd49c99a261a805013e9309e2142d" exitCode=0 Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.585269 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" event={"ID":"54b64571-ca07-484a-a2cc-e67fdffd5974","Type":"ContainerDied","Data":"9f2bdc2dac0842e30cb4b34728f2a5fdd78cd49c99a261a805013e9309e2142d"} Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.585331 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" event={"ID":"54b64571-ca07-484a-a2cc-e67fdffd5974","Type":"ContainerDied","Data":"043814a36f263dce238d1a98b0c9deb0ff707a8564abb1dc5de40a1813716549"} Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.585343 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="043814a36f263dce238d1a98b0c9deb0ff707a8564abb1dc5de40a1813716549" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.636640 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.787336 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-sb\") pod \"54b64571-ca07-484a-a2cc-e67fdffd5974\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.787400 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-config\") pod \"54b64571-ca07-484a-a2cc-e67fdffd5974\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.787434 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-nb\") pod \"54b64571-ca07-484a-a2cc-e67fdffd5974\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.787493 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-svc\") pod \"54b64571-ca07-484a-a2cc-e67fdffd5974\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.787528 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-swift-storage-0\") pod \"54b64571-ca07-484a-a2cc-e67fdffd5974\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.787686 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbxgt\" 
(UniqueName: \"kubernetes.io/projected/54b64571-ca07-484a-a2cc-e67fdffd5974-kube-api-access-tbxgt\") pod \"54b64571-ca07-484a-a2cc-e67fdffd5974\" (UID: \"54b64571-ca07-484a-a2cc-e67fdffd5974\") " Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.804070 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54b64571-ca07-484a-a2cc-e67fdffd5974-kube-api-access-tbxgt" (OuterVolumeSpecName: "kube-api-access-tbxgt") pod "54b64571-ca07-484a-a2cc-e67fdffd5974" (UID: "54b64571-ca07-484a-a2cc-e67fdffd5974"). InnerVolumeSpecName "kube-api-access-tbxgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.878829 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "54b64571-ca07-484a-a2cc-e67fdffd5974" (UID: "54b64571-ca07-484a-a2cc-e67fdffd5974"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.894172 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbxgt\" (UniqueName: \"kubernetes.io/projected/54b64571-ca07-484a-a2cc-e67fdffd5974-kube-api-access-tbxgt\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.894399 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.918861 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "54b64571-ca07-484a-a2cc-e67fdffd5974" (UID: "54b64571-ca07-484a-a2cc-e67fdffd5974"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.933279 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-config" (OuterVolumeSpecName: "config") pod "54b64571-ca07-484a-a2cc-e67fdffd5974" (UID: "54b64571-ca07-484a-a2cc-e67fdffd5974"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.933374 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "54b64571-ca07-484a-a2cc-e67fdffd5974" (UID: "54b64571-ca07-484a-a2cc-e67fdffd5974"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.969990 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "54b64571-ca07-484a-a2cc-e67fdffd5974" (UID: "54b64571-ca07-484a-a2cc-e67fdffd5974"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.995832 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.996062 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.996127 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:10 crc kubenswrapper[4636]: I1002 21:46:10.996202 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/54b64571-ca07-484a-a2cc-e67fdffd5974-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:11 crc kubenswrapper[4636]: I1002 21:46:11.595339 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3d58ed15-a4f4-4cb0-8d57-95f5a863bca3","Type":"ContainerStarted","Data":"7ebe3772373963a5a7e72eb121a2cbe13b0c22dcd316e7ba51a41541414c575c"} Oct 02 21:46:11 crc kubenswrapper[4636]: I1002 21:46:11.595356 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-jq4n9" Oct 02 21:46:11 crc kubenswrapper[4636]: I1002 21:46:11.595709 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 02 21:46:11 crc kubenswrapper[4636]: I1002 21:46:11.628318 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.350916163 podStartE2EDuration="6.628292829s" podCreationTimestamp="2025-10-02 21:46:05 +0000 UTC" firstStartedPulling="2025-10-02 21:46:06.481545178 +0000 UTC m=+1357.804553197" lastFinishedPulling="2025-10-02 21:46:10.758921844 +0000 UTC m=+1362.081929863" observedRunningTime="2025-10-02 21:46:11.621505721 +0000 UTC m=+1362.944513750" watchObservedRunningTime="2025-10-02 21:46:11.628292829 +0000 UTC m=+1362.951300858" Oct 02 21:46:11 crc kubenswrapper[4636]: I1002 21:46:11.645115 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-jq4n9"] Oct 02 21:46:11 crc kubenswrapper[4636]: I1002 21:46:11.657315 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-jq4n9"] Oct 02 21:46:13 crc kubenswrapper[4636]: I1002 21:46:13.083001 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-6cfb778d8c-pmqb5" podUID="4bc228bc-6368-438e-a574-aa4c80d81dc6" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Oct 02 21:46:13 crc kubenswrapper[4636]: I1002 21:46:13.614878 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" path="/var/lib/kubelet/pods/54b64571-ca07-484a-a2cc-e67fdffd5974/volumes" Oct 02 21:46:14 crc kubenswrapper[4636]: I1002 21:46:14.620503 4636 generic.go:334] "Generic (PLEG): container finished" podID="4d28e7d5-138e-4b42-bc0a-d118cca25785" containerID="4ac9f7567672b639710bb86a4936c3ee21159d9efc95cbc3c25626988f3ebae2" exitCode=0 Oct 02 21:46:14 crc 
kubenswrapper[4636]: I1002 21:46:14.620595 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b82lk" event={"ID":"4d28e7d5-138e-4b42-bc0a-d118cca25785","Type":"ContainerDied","Data":"4ac9f7567672b639710bb86a4936c3ee21159d9efc95cbc3c25626988f3ebae2"} Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.007964 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.094511 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxxj7\" (UniqueName: \"kubernetes.io/projected/4d28e7d5-138e-4b42-bc0a-d118cca25785-kube-api-access-zxxj7\") pod \"4d28e7d5-138e-4b42-bc0a-d118cca25785\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.094636 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-scripts\") pod \"4d28e7d5-138e-4b42-bc0a-d118cca25785\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.094656 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-config-data\") pod \"4d28e7d5-138e-4b42-bc0a-d118cca25785\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.094683 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-combined-ca-bundle\") pod \"4d28e7d5-138e-4b42-bc0a-d118cca25785\" (UID: \"4d28e7d5-138e-4b42-bc0a-d118cca25785\") " Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.101878 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d28e7d5-138e-4b42-bc0a-d118cca25785-kube-api-access-zxxj7" (OuterVolumeSpecName: "kube-api-access-zxxj7") pod "4d28e7d5-138e-4b42-bc0a-d118cca25785" (UID: "4d28e7d5-138e-4b42-bc0a-d118cca25785"). InnerVolumeSpecName "kube-api-access-zxxj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.102954 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-scripts" (OuterVolumeSpecName: "scripts") pod "4d28e7d5-138e-4b42-bc0a-d118cca25785" (UID: "4d28e7d5-138e-4b42-bc0a-d118cca25785"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.128436 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.129843 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d28e7d5-138e-4b42-bc0a-d118cca25785" (UID: "4d28e7d5-138e-4b42-bc0a-d118cca25785"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.138252 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-config-data" (OuterVolumeSpecName: "config-data") pod "4d28e7d5-138e-4b42-bc0a-d118cca25785" (UID: "4d28e7d5-138e-4b42-bc0a-d118cca25785"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.143662 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.143808 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.196569 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxxj7\" (UniqueName: \"kubernetes.io/projected/4d28e7d5-138e-4b42-bc0a-d118cca25785-kube-api-access-zxxj7\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.196607 4636 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.196618 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.196629 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d28e7d5-138e-4b42-bc0a-d118cca25785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.644707 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b82lk" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.645199 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b82lk" event={"ID":"4d28e7d5-138e-4b42-bc0a-d118cca25785","Type":"ContainerDied","Data":"1c28b8404d602703519b74235fb6da065970ec11e58d6372dc0ef08cc24eaecd"} Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.645238 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c28b8404d602703519b74235fb6da065970ec11e58d6372dc0ef08cc24eaecd" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.654264 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.831500 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.832060 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-log" containerID="cri-o://8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba" gracePeriod=30 Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.832130 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-api" containerID="cri-o://8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e" gracePeriod=30 Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.840316 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.840557 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" containerName="nova-scheduler-scheduler" containerID="cri-o://002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" gracePeriod=30 Oct 02 21:46:16 crc kubenswrapper[4636]: I1002 21:46:16.860226 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.406220 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521221 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-public-tls-certs\") pod \"c3b3db60-4199-4c3a-9315-6d03316f47cd\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521305 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3b3db60-4199-4c3a-9315-6d03316f47cd-logs\") pod \"c3b3db60-4199-4c3a-9315-6d03316f47cd\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521435 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-config-data\") pod \"c3b3db60-4199-4c3a-9315-6d03316f47cd\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521497 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zwhn\" (UniqueName: \"kubernetes.io/projected/c3b3db60-4199-4c3a-9315-6d03316f47cd-kube-api-access-4zwhn\") pod \"c3b3db60-4199-4c3a-9315-6d03316f47cd\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521544 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-internal-tls-certs\") pod \"c3b3db60-4199-4c3a-9315-6d03316f47cd\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521579 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-combined-ca-bundle\") pod \"c3b3db60-4199-4c3a-9315-6d03316f47cd\" (UID: \"c3b3db60-4199-4c3a-9315-6d03316f47cd\") " Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.521727 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3b3db60-4199-4c3a-9315-6d03316f47cd-logs" (OuterVolumeSpecName: "logs") pod "c3b3db60-4199-4c3a-9315-6d03316f47cd" (UID: "c3b3db60-4199-4c3a-9315-6d03316f47cd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.522235 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3b3db60-4199-4c3a-9315-6d03316f47cd-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.525882 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3b3db60-4199-4c3a-9315-6d03316f47cd-kube-api-access-4zwhn" (OuterVolumeSpecName: "kube-api-access-4zwhn") pod "c3b3db60-4199-4c3a-9315-6d03316f47cd" (UID: "c3b3db60-4199-4c3a-9315-6d03316f47cd"). InnerVolumeSpecName "kube-api-access-4zwhn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.550135 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-config-data" (OuterVolumeSpecName: "config-data") pod "c3b3db60-4199-4c3a-9315-6d03316f47cd" (UID: "c3b3db60-4199-4c3a-9315-6d03316f47cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.553900 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3b3db60-4199-4c3a-9315-6d03316f47cd" (UID: "c3b3db60-4199-4c3a-9315-6d03316f47cd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.572345 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c3b3db60-4199-4c3a-9315-6d03316f47cd" (UID: "c3b3db60-4199-4c3a-9315-6d03316f47cd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.574118 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c3b3db60-4199-4c3a-9315-6d03316f47cd" (UID: "c3b3db60-4199-4c3a-9315-6d03316f47cd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.624351 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.624582 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zwhn\" (UniqueName: \"kubernetes.io/projected/c3b3db60-4199-4c3a-9315-6d03316f47cd-kube-api-access-4zwhn\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.624652 4636 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.624713 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.624790 4636 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3b3db60-4199-4c3a-9315-6d03316f47cd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653121 4636 generic.go:334] "Generic (PLEG): container finished" podID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerID="8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e" exitCode=0 Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653377 4636 generic.go:334] "Generic (PLEG): container finished" 
podID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerID="8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba" exitCode=143 Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653250 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653168 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c3b3db60-4199-4c3a-9315-6d03316f47cd","Type":"ContainerDied","Data":"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e"} Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653573 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c3b3db60-4199-4c3a-9315-6d03316f47cd","Type":"ContainerDied","Data":"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba"} Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653588 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c3b3db60-4199-4c3a-9315-6d03316f47cd","Type":"ContainerDied","Data":"e9dc9c3a86474afc70fcef588492d36333c3db8a0d5582e41e0ba7b050ee7276"} Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.653614 4636 scope.go:117] "RemoveContainer" containerID="8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.674831 4636 scope.go:117] "RemoveContainer" containerID="8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.682011 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.700219 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705019 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.705454 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerName="dnsmasq-dns" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705471 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerName="dnsmasq-dns" Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.705487 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d28e7d5-138e-4b42-bc0a-d118cca25785" containerName="nova-manage" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705494 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d28e7d5-138e-4b42-bc0a-d118cca25785" containerName="nova-manage" Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.705518 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerName="init" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705524 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerName="init" Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.705534 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-log" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705541 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-log" Oct 02 21:46:17 crc 
kubenswrapper[4636]: E1002 21:46:17.705551 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-api" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705557 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-api" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705813 4636 scope.go:117] "RemoveContainer" containerID="8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705939 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-api" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705956 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="54b64571-ca07-484a-a2cc-e67fdffd5974" containerName="dnsmasq-dns" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705970 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d28e7d5-138e-4b42-bc0a-d118cca25785" containerName="nova-manage" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.705980 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" containerName="nova-api-log" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.706920 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.710539 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.710719 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.710839 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.711077 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e\": container with ID starting with 8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e not found: ID does not exist" containerID="8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711107 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e"} err="failed to get container status \"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e\": rpc error: code = NotFound desc = could not find container \"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e\": container with ID starting with 8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e not found: ID does not exist" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711136 4636 scope.go:117] "RemoveContainer" containerID="8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba" Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.711463 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba\": container with ID starting with 
8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba not found: ID does not exist" containerID="8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711485 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba"} err="failed to get container status \"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba\": rpc error: code = NotFound desc = could not find container \"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba\": container with ID starting with 8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba not found: ID does not exist" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711500 4636 scope.go:117] "RemoveContainer" containerID="8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711681 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e"} err="failed to get container status \"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e\": rpc error: code = NotFound desc = could not find container \"8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e\": container with ID starting with 8811d450e6cb9ab3d15ae505a722fdbf1730f2237e7eeeefebffaf24349edd5e not found: ID does not exist" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711725 4636 scope.go:117] "RemoveContainer" containerID="8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.711909 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba"} err="failed to get container status \"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba\": rpc error: code = NotFound desc = could not find container \"8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba\": container with ID starting with 8beac78fa18f461b67339eabc2a6db183e813b17a7c1975030b1396df533edba not found: ID does not exist" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.714077 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.743442 4636 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.744558 4636 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.748109 4636 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 02 21:46:17 crc kubenswrapper[4636]: E1002 21:46:17.748143 4636 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" containerName="nova-scheduler-scheduler" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.828277 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-config-data\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.828374 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.828414 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44babad0-aad8-4dcf-870d-798d6444957f-logs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.828458 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-public-tls-certs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.828502 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g927d\" (UniqueName: \"kubernetes.io/projected/44babad0-aad8-4dcf-870d-798d6444957f-kube-api-access-g927d\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.828516 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.929737 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g927d\" (UniqueName: \"kubernetes.io/projected/44babad0-aad8-4dcf-870d-798d6444957f-kube-api-access-g927d\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.929794 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.929820 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-config-data\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.929890 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.929925 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44babad0-aad8-4dcf-870d-798d6444957f-logs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.929966 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-public-tls-certs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.930604 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44babad0-aad8-4dcf-870d-798d6444957f-logs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.935488 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-config-data\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.936014 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.936143 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.936289 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44babad0-aad8-4dcf-870d-798d6444957f-public-tls-certs\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:17 crc kubenswrapper[4636]: I1002 21:46:17.951787 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g927d\" (UniqueName: \"kubernetes.io/projected/44babad0-aad8-4dcf-870d-798d6444957f-kube-api-access-g927d\") pod \"nova-api-0\" (UID: \"44babad0-aad8-4dcf-870d-798d6444957f\") " pod="openstack/nova-api-0" Oct 02 21:46:18 crc kubenswrapper[4636]: I1002 21:46:18.029983 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 02 21:46:18 crc kubenswrapper[4636]: I1002 21:46:18.557995 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 02 21:46:18 crc kubenswrapper[4636]: I1002 21:46:18.676757 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-log" containerID="cri-o://edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522" gracePeriod=30 Oct 02 21:46:18 crc kubenswrapper[4636]: I1002 21:46:18.676844 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"44babad0-aad8-4dcf-870d-798d6444957f","Type":"ContainerStarted","Data":"ec12039f20d8d45ad14668e5a1eb62269ee0169c6f7a32976a9c28664d5f9b71"} Oct 02 21:46:18 crc kubenswrapper[4636]: I1002 21:46:18.677184 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-metadata" containerID="cri-o://6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d" gracePeriod=30 Oct 02 21:46:19 crc kubenswrapper[4636]: I1002 21:46:19.615304 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3b3db60-4199-4c3a-9315-6d03316f47cd" path="/var/lib/kubelet/pods/c3b3db60-4199-4c3a-9315-6d03316f47cd/volumes" Oct 02 21:46:19 crc kubenswrapper[4636]: I1002 21:46:19.687932 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"44babad0-aad8-4dcf-870d-798d6444957f","Type":"ContainerStarted","Data":"d3a90266fee3dc56a2e7ee234b30e871bae9cc779b21fb7243f4b0f9faa0c88f"} Oct 02 21:46:19 crc kubenswrapper[4636]: I1002 21:46:19.687998 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"44babad0-aad8-4dcf-870d-798d6444957f","Type":"ContainerStarted","Data":"c1b6e9f0f36298786bc3333965f01a078b30fd3ba2ff9d5c3340ca736016bd8a"} Oct 02 21:46:19 crc kubenswrapper[4636]: I1002 21:46:19.690410 4636 generic.go:334] "Generic (PLEG): container finished" podID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerID="edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522" exitCode=143 Oct 02 21:46:19 crc kubenswrapper[4636]: I1002 21:46:19.690472 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508","Type":"ContainerDied","Data":"edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522"} Oct 02 21:46:19 crc kubenswrapper[4636]: I1002 21:46:19.717516 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.717497672 podStartE2EDuration="2.717497672s" podCreationTimestamp="2025-10-02 21:46:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:46:19.705963589 +0000 UTC m=+1371.028971608" watchObservedRunningTime="2025-10-02 21:46:19.717497672 +0000 UTC m=+1371.040505691" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.548345 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.620048 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zshv\" (UniqueName: \"kubernetes.io/projected/d7d77db9-6965-47b9-96a1-45c6f93bef5a-kube-api-access-4zshv\") pod \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.620131 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-config-data\") pod \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.620410 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-combined-ca-bundle\") pod \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\" (UID: \"d7d77db9-6965-47b9-96a1-45c6f93bef5a\") " Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.638991 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7d77db9-6965-47b9-96a1-45c6f93bef5a-kube-api-access-4zshv" (OuterVolumeSpecName: "kube-api-access-4zshv") pod "d7d77db9-6965-47b9-96a1-45c6f93bef5a" (UID: "d7d77db9-6965-47b9-96a1-45c6f93bef5a"). InnerVolumeSpecName "kube-api-access-4zshv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.661703 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-config-data" (OuterVolumeSpecName: "config-data") pod "d7d77db9-6965-47b9-96a1-45c6f93bef5a" (UID: "d7d77db9-6965-47b9-96a1-45c6f93bef5a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.688518 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7d77db9-6965-47b9-96a1-45c6f93bef5a" (UID: "d7d77db9-6965-47b9-96a1-45c6f93bef5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.708473 4636 generic.go:334] "Generic (PLEG): container finished" podID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" exitCode=0 Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.708515 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d7d77db9-6965-47b9-96a1-45c6f93bef5a","Type":"ContainerDied","Data":"002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa"} Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.708546 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d7d77db9-6965-47b9-96a1-45c6f93bef5a","Type":"ContainerDied","Data":"de1b2b3e814dd740a6bc237a8303c08d200443e816238ab8cf0acd1979513f3f"} Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.708542 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.708563 4636 scope.go:117] "RemoveContainer" containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.722599 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.722629 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zshv\" (UniqueName: \"kubernetes.io/projected/d7d77db9-6965-47b9-96a1-45c6f93bef5a-kube-api-access-4zshv\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.722640 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d77db9-6965-47b9-96a1-45c6f93bef5a-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.758148 4636 scope.go:117] "RemoveContainer" containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" Oct 02 21:46:21 crc kubenswrapper[4636]: E1002 21:46:21.758618 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa\": container with ID starting with 002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa not found: ID does not exist" containerID="002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.758680 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa"} err="failed to get container status \"002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa\": rpc error: code = NotFound desc = could not find container \"002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa\": container with ID starting with 002c806e301fbebf24bcae93433e69b04e87bc1d30a67d14476d4719c132daaa not found: ID does not exist" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.763025 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.786842 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.796784 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:46:21 crc kubenswrapper[4636]: E1002 21:46:21.797231 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" containerName="nova-scheduler-scheduler" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.797244 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" containerName="nova-scheduler-scheduler" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.797405 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" containerName="nova-scheduler-scheduler" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.798089 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.801300 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.804612 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.897846 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:50028->10.217.0.198:8775: read: connection reset by peer" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.897930 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:50038->10.217.0.198:8775: read: connection reset by peer" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.950767 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgzt5\" (UniqueName: \"kubernetes.io/projected/19136e75-a199-48d4-8b98-40771a6b84fc-kube-api-access-tgzt5\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.950910 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19136e75-a199-48d4-8b98-40771a6b84fc-config-data\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:21 crc kubenswrapper[4636]: I1002 21:46:21.950947 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19136e75-a199-48d4-8b98-40771a6b84fc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.052699 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19136e75-a199-48d4-8b98-40771a6b84fc-config-data\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.052769 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19136e75-a199-48d4-8b98-40771a6b84fc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.052848 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgzt5\" (UniqueName: \"kubernetes.io/projected/19136e75-a199-48d4-8b98-40771a6b84fc-kube-api-access-tgzt5\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.057135 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/19136e75-a199-48d4-8b98-40771a6b84fc-config-data\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.057332 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19136e75-a199-48d4-8b98-40771a6b84fc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.070304 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgzt5\" (UniqueName: \"kubernetes.io/projected/19136e75-a199-48d4-8b98-40771a6b84fc-kube-api-access-tgzt5\") pod \"nova-scheduler-0\" (UID: \"19136e75-a199-48d4-8b98-40771a6b84fc\") " pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.168993 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.346624 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.505708 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-logs\") pod \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.506146 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-config-data\") pod \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.506203 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-nova-metadata-tls-certs\") pod \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.506258 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx9jh\" (UniqueName: \"kubernetes.io/projected/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-kube-api-access-xx9jh\") pod \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.506280 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-combined-ca-bundle\") pod \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\" (UID: \"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508\") " Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.509692 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-logs" (OuterVolumeSpecName: "logs") pod "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" (UID: "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.542663 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-kube-api-access-xx9jh" (OuterVolumeSpecName: "kube-api-access-xx9jh") pod "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" (UID: "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508"). InnerVolumeSpecName "kube-api-access-xx9jh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.548957 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-config-data" (OuterVolumeSpecName: "config-data") pod "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" (UID: "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.566826 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" (UID: "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.599975 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" (UID: "e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.608082 4636 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-logs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.608270 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.608332 4636 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.608391 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx9jh\" (UniqueName: \"kubernetes.io/projected/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-kube-api-access-xx9jh\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:22 crc kubenswrapper[4636]: I1002 21:46:22.608464 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.777717 4636 generic.go:334] "Generic (PLEG): container finished" podID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerID="6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d" exitCode=0 Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.777775 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508","Type":"ContainerDied","Data":"6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d"} Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.777802 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508","Type":"ContainerDied","Data":"6e005e529a59da171615c1ed3be976b4194fef55c0056243a204ec38243dcded"} Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.777819 4636 scope.go:117] "RemoveContainer" containerID="6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.777955 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.843641 4636 scope.go:117] "RemoveContainer" containerID="edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.876971 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.901842 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.926248 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.927666 4636 scope.go:117] "RemoveContainer" containerID="6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d" Oct 02 21:46:23 crc kubenswrapper[4636]: E1002 21:46:22.932997 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d\": container with ID starting with 6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d not found: ID does not exist" containerID="6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.933281 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d"} err="failed to get container status \"6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d\": rpc error: code = NotFound desc = could not find container \"6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d\": container with ID starting with 6bc73b95448cec7df6c7831135524562181c469d8e571f2c42a03b61749a419d not found: ID does not exist" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.933307 4636 scope.go:117] "RemoveContainer" containerID="edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.938502 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:46:23 crc kubenswrapper[4636]: E1002 21:46:22.938914 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-metadata" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.938926 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-metadata" Oct 02 21:46:23 crc kubenswrapper[4636]: E1002 21:46:22.938972 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-log" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.938979 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-log" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.939228 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-metadata" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.939243 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" containerName="nova-metadata-log" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.940243 4636 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: E1002 21:46:22.941239 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522\": container with ID starting with edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522 not found: ID does not exist" containerID="edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.941262 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522"} err="failed to get container status \"edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522\": rpc error: code = NotFound desc = could not find container \"edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522\": container with ID starting with edd4c78d70abc392e3664984125ddd236a08646718a25b02e3d43d647e7fb522 not found: ID does not exist" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.941374 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.949196 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:22.949397 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.121532 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-config-data\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.121594 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97e51be-549b-475b-a257-71df6c99e2ef-logs\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.121643 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv8ln\" (UniqueName: \"kubernetes.io/projected/d97e51be-549b-475b-a257-71df6c99e2ef-kube-api-access-sv8ln\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.121668 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.121886 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " 
pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.223022 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97e51be-549b-475b-a257-71df6c99e2ef-logs\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.223120 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv8ln\" (UniqueName: \"kubernetes.io/projected/d97e51be-549b-475b-a257-71df6c99e2ef-kube-api-access-sv8ln\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.223146 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.223197 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.223276 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-config-data\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.223671 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97e51be-549b-475b-a257-71df6c99e2ef-logs\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.227315 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.227577 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.228866 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97e51be-549b-475b-a257-71df6c99e2ef-config-data\") pod \"nova-metadata-0\" (UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.248483 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv8ln\" (UniqueName: \"kubernetes.io/projected/d97e51be-549b-475b-a257-71df6c99e2ef-kube-api-access-sv8ln\") pod \"nova-metadata-0\" 
(UID: \"d97e51be-549b-475b-a257-71df6c99e2ef\") " pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.287365 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.614703 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7d77db9-6965-47b9-96a1-45c6f93bef5a" path="/var/lib/kubelet/pods/d7d77db9-6965-47b9-96a1-45c6f93bef5a/volumes" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.616199 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508" path="/var/lib/kubelet/pods/e4d4d10e-95a7-4f6b-bdcf-e4d48ac10508/volumes" Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.787245 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19136e75-a199-48d4-8b98-40771a6b84fc","Type":"ContainerStarted","Data":"7fd5381644bce1e7c68dac6194014abe924a63464a98c853c0e1d19c4e15c18e"} Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.787283 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19136e75-a199-48d4-8b98-40771a6b84fc","Type":"ContainerStarted","Data":"291b6ae9f40c743c65691ff8080c00d35296ca72a963917d1cdaf2e24ba2dc41"} Oct 02 21:46:23 crc kubenswrapper[4636]: I1002 21:46:23.812260 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 02 21:46:23 crc kubenswrapper[4636]: W1002 21:46:23.812831 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd97e51be_549b_475b_a257_71df6c99e2ef.slice/crio-c86c37d5f7c242072f37a8b02ff0c012ec9ab7eb5369f8ddc3bfbf3313b82d2a WatchSource:0}: Error finding container c86c37d5f7c242072f37a8b02ff0c012ec9ab7eb5369f8ddc3bfbf3313b82d2a: Status 404 returned error can't find the container with id c86c37d5f7c242072f37a8b02ff0c012ec9ab7eb5369f8ddc3bfbf3313b82d2a Oct 02 21:46:24 crc kubenswrapper[4636]: I1002 21:46:24.797675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d97e51be-549b-475b-a257-71df6c99e2ef","Type":"ContainerStarted","Data":"1251428f145481bbec174450170234b583ea55338b11e0becf27f56c04ca1036"} Oct 02 21:46:24 crc kubenswrapper[4636]: I1002 21:46:24.798206 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d97e51be-549b-475b-a257-71df6c99e2ef","Type":"ContainerStarted","Data":"dfb1b8702947ec5d69d90a347516b4c846b41969c8aeca816c18ce7931e64645"} Oct 02 21:46:24 crc kubenswrapper[4636]: I1002 21:46:24.798219 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d97e51be-549b-475b-a257-71df6c99e2ef","Type":"ContainerStarted","Data":"c86c37d5f7c242072f37a8b02ff0c012ec9ab7eb5369f8ddc3bfbf3313b82d2a"} Oct 02 21:46:24 crc kubenswrapper[4636]: I1002 21:46:24.817573 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.817549113 podStartE2EDuration="3.817549113s" podCreationTimestamp="2025-10-02 21:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:46:23.818011965 +0000 UTC m=+1375.141019984" watchObservedRunningTime="2025-10-02 21:46:24.817549113 +0000 UTC m=+1376.140557132" Oct 02 21:46:24 crc 
kubenswrapper[4636]: I1002 21:46:24.820518 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.820509151 podStartE2EDuration="2.820509151s" podCreationTimestamp="2025-10-02 21:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:46:24.812786658 +0000 UTC m=+1376.135794687" watchObservedRunningTime="2025-10-02 21:46:24.820509151 +0000 UTC m=+1376.143517170" Oct 02 21:46:27 crc kubenswrapper[4636]: I1002 21:46:27.169147 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 02 21:46:28 crc kubenswrapper[4636]: I1002 21:46:28.031307 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 02 21:46:28 crc kubenswrapper[4636]: I1002 21:46:28.031385 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 02 21:46:28 crc kubenswrapper[4636]: I1002 21:46:28.288568 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 02 21:46:28 crc kubenswrapper[4636]: I1002 21:46:28.290548 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 02 21:46:29 crc kubenswrapper[4636]: I1002 21:46:29.044022 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="44babad0-aad8-4dcf-870d-798d6444957f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:46:29 crc kubenswrapper[4636]: I1002 21:46:29.044050 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="44babad0-aad8-4dcf-870d-798d6444957f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:46:32 crc kubenswrapper[4636]: I1002 21:46:32.170033 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 02 21:46:32 crc kubenswrapper[4636]: I1002 21:46:32.214607 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 02 21:46:32 crc kubenswrapper[4636]: I1002 21:46:32.947855 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 02 21:46:33 crc kubenswrapper[4636]: I1002 21:46:33.289210 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 02 21:46:33 crc kubenswrapper[4636]: I1002 21:46:33.289315 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 02 21:46:34 crc kubenswrapper[4636]: I1002 21:46:34.297309 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d97e51be-549b-475b-a257-71df6c99e2ef" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:46:34 crc kubenswrapper[4636]: I1002 21:46:34.297341 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d97e51be-549b-475b-a257-71df6c99e2ef" containerName="nova-metadata-log" 
probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 02 21:46:35 crc kubenswrapper[4636]: I1002 21:46:35.903940 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 02 21:46:38 crc kubenswrapper[4636]: I1002 21:46:38.038456 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 02 21:46:38 crc kubenswrapper[4636]: I1002 21:46:38.038652 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 02 21:46:38 crc kubenswrapper[4636]: I1002 21:46:38.038818 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 02 21:46:38 crc kubenswrapper[4636]: I1002 21:46:38.038859 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 02 21:46:38 crc kubenswrapper[4636]: I1002 21:46:38.065786 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 02 21:46:38 crc kubenswrapper[4636]: I1002 21:46:38.070989 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 02 21:46:43 crc kubenswrapper[4636]: I1002 21:46:43.295029 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 02 21:46:43 crc kubenswrapper[4636]: I1002 21:46:43.301386 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 02 21:46:43 crc kubenswrapper[4636]: I1002 21:46:43.303047 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 02 21:46:44 crc kubenswrapper[4636]: I1002 21:46:44.044625 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 02 21:46:51 crc kubenswrapper[4636]: I1002 21:46:51.795365 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:46:52 crc kubenswrapper[4636]: I1002 21:46:52.690417 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:46:56 crc kubenswrapper[4636]: I1002 21:46:56.628283 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="rabbitmq" containerID="cri-o://ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd" gracePeriod=604796 Oct 02 21:46:56 crc kubenswrapper[4636]: I1002 21:46:56.639221 4636 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Oct 02 21:46:57 crc kubenswrapper[4636]: I1002 21:46:57.459903 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerName="rabbitmq" containerID="cri-o://60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff" gracePeriod=604796 Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.172311 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.232419 4636 generic.go:334] "Generic (PLEG): container finished" podID="780601d4-af7f-47ee-b580-939d5531e805" containerID="ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd" exitCode=0 Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.232479 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"780601d4-af7f-47ee-b580-939d5531e805","Type":"ContainerDied","Data":"ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd"} Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.232476 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.232521 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"780601d4-af7f-47ee-b580-939d5531e805","Type":"ContainerDied","Data":"44b07f54af556a5bd7326d5cf80b28674cea14bbf2e3eb26bceeae50cd523de4"} Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.232550 4636 scope.go:117] "RemoveContainer" containerID="ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.258295 4636 scope.go:117] "RemoveContainer" containerID="1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.285853 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-erlang-cookie\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.285968 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-confd\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286060 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-config-data\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286113 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-plugins-conf\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286187 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286224 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/780601d4-af7f-47ee-b580-939d5531e805-erlang-cookie-secret\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: 
\"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286254 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-tls\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286305 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/780601d4-af7f-47ee-b580-939d5531e805-pod-info\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286335 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-plugins\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286355 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkmpz\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-kube-api-access-fkmpz\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.286417 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-server-conf\") pod \"780601d4-af7f-47ee-b580-939d5531e805\" (UID: \"780601d4-af7f-47ee-b580-939d5531e805\") " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.288056 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.290864 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.291063 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.299244 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780601d4-af7f-47ee-b580-939d5531e805-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). 
InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.299277 4636 scope.go:117] "RemoveContainer" containerID="ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd" Oct 02 21:47:03 crc kubenswrapper[4636]: E1002 21:47:03.304357 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd\": container with ID starting with ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd not found: ID does not exist" containerID="ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.304661 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd"} err="failed to get container status \"ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd\": rpc error: code = NotFound desc = could not find container \"ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd\": container with ID starting with ffa2ccf9d8afc8eed2b9fd9a9a100f372efba2ffbac170988699380ab5caf0cd not found: ID does not exist" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.304689 4636 scope.go:117] "RemoveContainer" containerID="1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a" Oct 02 21:47:03 crc kubenswrapper[4636]: E1002 21:47:03.307569 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a\": container with ID starting with 1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a not found: ID does not exist" containerID="1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.307602 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a"} err="failed to get container status \"1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a\": rpc error: code = NotFound desc = could not find container \"1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a\": container with ID starting with 1c8116871f74deecf9e363c1c5395095745f297b0b2a68134cb51f48f334311a not found: ID does not exist" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.310608 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.327247 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/780601d4-af7f-47ee-b580-939d5531e805-pod-info" (OuterVolumeSpecName: "pod-info") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.328108 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.337956 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-kube-api-access-fkmpz" (OuterVolumeSpecName: "kube-api-access-fkmpz") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "kube-api-access-fkmpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.377343 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-config-data" (OuterVolumeSpecName: "config-data") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389384 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkmpz\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-kube-api-access-fkmpz\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389415 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389424 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389433 4636 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389454 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389462 4636 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/780601d4-af7f-47ee-b580-939d5531e805-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389472 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.389481 4636 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/780601d4-af7f-47ee-b580-939d5531e805-pod-info\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 
crc kubenswrapper[4636]: I1002 21:47:03.389489 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.420478 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-server-conf" (OuterVolumeSpecName: "server-conf") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.427003 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.493863 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.493888 4636 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/780601d4-af7f-47ee-b580-939d5531e805-server-conf\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.496947 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "780601d4-af7f-47ee-b580-939d5531e805" (UID: "780601d4-af7f-47ee-b580-939d5531e805"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.560592 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.568160 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.595067 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/780601d4-af7f-47ee-b580-939d5531e805-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.615891 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="780601d4-af7f-47ee-b580-939d5531e805" path="/var/lib/kubelet/pods/780601d4-af7f-47ee-b580-939d5531e805/volumes" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.616528 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:47:03 crc kubenswrapper[4636]: E1002 21:47:03.616853 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="rabbitmq" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.616869 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="rabbitmq" Oct 02 21:47:03 crc kubenswrapper[4636]: E1002 21:47:03.616900 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="setup-container" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.616907 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="setup-container" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.617077 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="780601d4-af7f-47ee-b580-939d5531e805" containerName="rabbitmq" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.618269 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.620081 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.620277 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.620454 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.620617 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.621204 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-vvlmg" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.622812 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.622851 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.642402 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.798704 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.798780 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/379f5124-2304-4bd2-9765-757caebdd35a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.798885 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-config-data\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.798949 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799066 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799112 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92dr7\" (UniqueName: 
\"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-kube-api-access-92dr7\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799235 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799314 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799356 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799385 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.799430 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/379f5124-2304-4bd2-9765-757caebdd35a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.900727 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901032 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901049 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901076 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/379f5124-2304-4bd2-9765-757caebdd35a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc 
kubenswrapper[4636]: I1002 21:47:03.901151 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901187 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/379f5124-2304-4bd2-9765-757caebdd35a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901206 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-config-data\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901223 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901249 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901268 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92dr7\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-kube-api-access-92dr7\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901300 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.901557 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.902948 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.903089 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.907252 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-config-data\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.921043 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.921450 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/379f5124-2304-4bd2-9765-757caebdd35a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.922037 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92dr7\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-kube-api-access-92dr7\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.927116 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/379f5124-2304-4bd2-9765-757caebdd35a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.927371 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.927875 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/379f5124-2304-4bd2-9765-757caebdd35a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.930939 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/379f5124-2304-4bd2-9765-757caebdd35a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.951485 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"379f5124-2304-4bd2-9765-757caebdd35a\") " pod="openstack/rabbitmq-server-0" Oct 02 21:47:03 crc kubenswrapper[4636]: I1002 21:47:03.955239 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.048954 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.207395 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-568pj\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-kube-api-access-568pj\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.207877 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-server-conf\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.207921 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-erlang-cookie-secret\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.207955 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-plugins-conf\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208037 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208081 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-erlang-cookie\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208102 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-pod-info\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208152 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-confd\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208177 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-tls\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208209 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-plugins\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.208265 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-config-data\") pod \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\" (UID: \"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9\") " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.217093 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.220083 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.220318 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-kube-api-access-568pj" (OuterVolumeSpecName: "kube-api-access-568pj") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "kube-api-access-568pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.222296 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.222678 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.226881 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-pod-info" (OuterVolumeSpecName: "pod-info") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.231505 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.235074 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.257732 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-config-data" (OuterVolumeSpecName: "config-data") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.260128 4636 generic.go:334] "Generic (PLEG): container finished" podID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerID="60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff" exitCode=0 Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.260185 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9","Type":"ContainerDied","Data":"60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff"} Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.260210 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5a227f5a-7dbd-49f9-b8a0-62c3911fefa9","Type":"ContainerDied","Data":"f80d1359216378e382bab24740336e669283d37ff98c6f40b15f9867e938dadf"} Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.260244 4636 scope.go:117] "RemoveContainer" containerID="60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.260377 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.298231 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-server-conf" (OuterVolumeSpecName: "server-conf") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.304883 4636 scope.go:117] "RemoveContainer" containerID="0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310080 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310114 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310124 4636 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-pod-info\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310132 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310140 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310151 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310160 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-568pj\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-kube-api-access-568pj\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310168 4636 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-server-conf\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310176 4636 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.310184 4636 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.335698 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.338030 4636 scope.go:117] "RemoveContainer" containerID="60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff" Oct 02 21:47:04 crc kubenswrapper[4636]: E1002 21:47:04.338578 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff\": container with ID starting with 60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff not found: ID does not exist" containerID="60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.338606 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff"} err="failed to get container status \"60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff\": rpc error: code = NotFound desc = could not find container \"60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff\": container with ID starting with 60fef04a994552c1a9837ca8450f2ed1da6cafb1f4c760006b9835bf1277b7ff not found: ID does not exist" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.338645 4636 scope.go:117] "RemoveContainer" containerID="0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7" Oct 02 21:47:04 crc kubenswrapper[4636]: E1002 21:47:04.339077 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7\": container with ID starting with 0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7 not found: ID does not exist" containerID="0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.339095 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7"} err="failed to get container status \"0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7\": rpc error: code = NotFound desc = could not find container \"0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7\": container with ID starting with 0f30dd6b5d0be64d57746f1627f822097cf2fc1c268c35389015f622d6df00a7 not found: ID does not exist" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.388027 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" (UID: "5a227f5a-7dbd-49f9-b8a0-62c3911fefa9"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.411291 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.411774 4636 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.492837 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 02 21:47:04 crc kubenswrapper[4636]: W1002 21:47:04.498248 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod379f5124_2304_4bd2_9765_757caebdd35a.slice/crio-a500ebfb793ade8cabe552d5cd62683494cce01f4f1d3ab8a8280d812488ba0a WatchSource:0}: Error finding container a500ebfb793ade8cabe552d5cd62683494cce01f4f1d3ab8a8280d812488ba0a: Status 404 returned error can't find the container with id a500ebfb793ade8cabe552d5cd62683494cce01f4f1d3ab8a8280d812488ba0a Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.609166 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.618226 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.630400 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:47:04 crc kubenswrapper[4636]: E1002 21:47:04.630763 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerName="setup-container" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.630779 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerName="setup-container" Oct 02 21:47:04 crc kubenswrapper[4636]: E1002 21:47:04.630795 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerName="rabbitmq" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.630802 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerName="rabbitmq" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.630967 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" containerName="rabbitmq" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.631876 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.634319 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.634551 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.635868 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.636119 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.636341 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.637201 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.637670 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zl72l" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.658527 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716352 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716445 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716512 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716575 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716592 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716631 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-xnxz2\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-kube-api-access-xnxz2\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716660 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716693 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716722 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716828 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.716868 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818646 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnxz2\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-kube-api-access-xnxz2\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818690 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818731 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818774 4636 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818803 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818828 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818848 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818880 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818932 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818954 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.818970 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.819856 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.820054 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.820409 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.820628 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.820674 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.821854 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.824286 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.824446 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.824570 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.825161 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.844493 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnxz2\" (UniqueName: \"kubernetes.io/projected/8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73-kube-api-access-xnxz2\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.875662 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73\") " pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:04 crc kubenswrapper[4636]: I1002 21:47:04.951054 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:05 crc kubenswrapper[4636]: I1002 21:47:05.270016 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"379f5124-2304-4bd2-9765-757caebdd35a","Type":"ContainerStarted","Data":"a500ebfb793ade8cabe552d5cd62683494cce01f4f1d3ab8a8280d812488ba0a"} Oct 02 21:47:05 crc kubenswrapper[4636]: I1002 21:47:05.408087 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 02 21:47:05 crc kubenswrapper[4636]: I1002 21:47:05.616482 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a227f5a-7dbd-49f9-b8a0-62c3911fefa9" path="/var/lib/kubelet/pods/5a227f5a-7dbd-49f9-b8a0-62c3911fefa9/volumes" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.295799 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"379f5124-2304-4bd2-9765-757caebdd35a","Type":"ContainerStarted","Data":"36e80298b830f034e1a0b1bcdf2ff4ce6d947dd8f5a6427276d8d6fdaad82cbc"} Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.298232 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73","Type":"ContainerStarted","Data":"4e61290ef499819ede54d50e592bc37e0b6e62b696b48acfb99d7fee84a66896"} Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.771510 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pxq8v"] Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.777286 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.780407 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.783725 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pxq8v"] Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.854897 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.854985 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2kz2\" (UniqueName: \"kubernetes.io/projected/26f39e78-df18-4c0d-9bf3-f29d11459ea9-kube-api-access-n2kz2\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.855039 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.855090 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-config\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.855135 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.855201 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-svc\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.855238 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.956978 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2kz2\" (UniqueName: \"kubernetes.io/projected/26f39e78-df18-4c0d-9bf3-f29d11459ea9-kube-api-access-n2kz2\") pod 
\"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.957361 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.957419 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-config\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.957470 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.957539 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-svc\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.957573 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.957608 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.958598 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.959349 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-config\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.959734 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " 
pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.959890 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-svc\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.960462 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.964407 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:06 crc kubenswrapper[4636]: I1002 21:47:06.996560 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2kz2\" (UniqueName: \"kubernetes.io/projected/26f39e78-df18-4c0d-9bf3-f29d11459ea9-kube-api-access-n2kz2\") pod \"dnsmasq-dns-67b789f86c-pxq8v\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:07 crc kubenswrapper[4636]: I1002 21:47:07.111999 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:07 crc kubenswrapper[4636]: I1002 21:47:07.317051 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73","Type":"ContainerStarted","Data":"33c237fa4fd9295527d77790f5ae55a45a81e0ab05ec0c5f5ff44470da589b0a"} Oct 02 21:47:07 crc kubenswrapper[4636]: I1002 21:47:07.587068 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pxq8v"] Oct 02 21:47:07 crc kubenswrapper[4636]: W1002 21:47:07.591784 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26f39e78_df18_4c0d_9bf3_f29d11459ea9.slice/crio-d527e52270705f2dab38c66f7df41d9a6cab5a966bd58efe9c0e26e0850a43b8 WatchSource:0}: Error finding container d527e52270705f2dab38c66f7df41d9a6cab5a966bd58efe9c0e26e0850a43b8: Status 404 returned error can't find the container with id d527e52270705f2dab38c66f7df41d9a6cab5a966bd58efe9c0e26e0850a43b8 Oct 02 21:47:08 crc kubenswrapper[4636]: I1002 21:47:08.326400 4636 generic.go:334] "Generic (PLEG): container finished" podID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerID="10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176" exitCode=0 Oct 02 21:47:08 crc kubenswrapper[4636]: I1002 21:47:08.326495 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" event={"ID":"26f39e78-df18-4c0d-9bf3-f29d11459ea9","Type":"ContainerDied","Data":"10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176"} Oct 02 21:47:08 crc kubenswrapper[4636]: I1002 21:47:08.326768 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" 
event={"ID":"26f39e78-df18-4c0d-9bf3-f29d11459ea9","Type":"ContainerStarted","Data":"d527e52270705f2dab38c66f7df41d9a6cab5a966bd58efe9c0e26e0850a43b8"} Oct 02 21:47:09 crc kubenswrapper[4636]: I1002 21:47:09.339912 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" event={"ID":"26f39e78-df18-4c0d-9bf3-f29d11459ea9","Type":"ContainerStarted","Data":"ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70"} Oct 02 21:47:09 crc kubenswrapper[4636]: I1002 21:47:09.340269 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:09 crc kubenswrapper[4636]: I1002 21:47:09.363535 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" podStartSLOduration=3.363517282 podStartE2EDuration="3.363517282s" podCreationTimestamp="2025-10-02 21:47:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:47:09.358702016 +0000 UTC m=+1420.681710055" watchObservedRunningTime="2025-10-02 21:47:09.363517282 +0000 UTC m=+1420.686525301" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.113687 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.187653 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9khnk"] Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.187965 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerName="dnsmasq-dns" containerID="cri-o://0bea228f50f6510275aedb8d338944230df07f1579325930e830d9debb39fb4f" gracePeriod=10 Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.372855 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79dc84bdb7-4sx8m"] Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.374349 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.414985 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79dc84bdb7-4sx8m"] Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.448642 4636 generic.go:334] "Generic (PLEG): container finished" podID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerID="0bea228f50f6510275aedb8d338944230df07f1579325930e830d9debb39fb4f" exitCode=0 Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.448731 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" event={"ID":"fc3ee6cf-52bc-4586-b226-c40a74952c04","Type":"ContainerDied","Data":"0bea228f50f6510275aedb8d338944230df07f1579325930e830d9debb39fb4f"} Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477646 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-openstack-edpm-ipam\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477687 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-ovsdbserver-sb\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477762 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-config\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477788 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnfwz\" (UniqueName: \"kubernetes.io/projected/b21803db-480b-4261-a6d3-62042843c92f-kube-api-access-tnfwz\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477813 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-dns-svc\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477843 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-ovsdbserver-nb\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.477893 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-dns-swift-storage-0\") pod 
\"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.580881 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-dns-swift-storage-0\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.580955 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-openstack-edpm-ipam\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.580976 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-ovsdbserver-sb\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.581031 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-config\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.581065 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnfwz\" (UniqueName: \"kubernetes.io/projected/b21803db-480b-4261-a6d3-62042843c92f-kube-api-access-tnfwz\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.581090 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-dns-svc\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.581119 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-ovsdbserver-nb\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.581925 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-ovsdbserver-nb\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.582057 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-ovsdbserver-sb\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: 
\"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.582604 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-dns-swift-storage-0\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.583100 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-config\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.583176 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-openstack-edpm-ipam\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.583822 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b21803db-480b-4261-a6d3-62042843c92f-dns-svc\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.611856 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnfwz\" (UniqueName: \"kubernetes.io/projected/b21803db-480b-4261-a6d3-62042843c92f-kube-api-access-tnfwz\") pod \"dnsmasq-dns-79dc84bdb7-4sx8m\" (UID: \"b21803db-480b-4261-a6d3-62042843c92f\") " pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.713249 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.817786 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.894889 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-svc\") pod \"fc3ee6cf-52bc-4586-b226-c40a74952c04\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.895112 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnjbk\" (UniqueName: \"kubernetes.io/projected/fc3ee6cf-52bc-4586-b226-c40a74952c04-kube-api-access-fnjbk\") pod \"fc3ee6cf-52bc-4586-b226-c40a74952c04\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.900543 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc3ee6cf-52bc-4586-b226-c40a74952c04-kube-api-access-fnjbk" (OuterVolumeSpecName: "kube-api-access-fnjbk") pod "fc3ee6cf-52bc-4586-b226-c40a74952c04" (UID: "fc3ee6cf-52bc-4586-b226-c40a74952c04"). InnerVolumeSpecName "kube-api-access-fnjbk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:17 crc kubenswrapper[4636]: I1002 21:47:17.947159 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fc3ee6cf-52bc-4586-b226-c40a74952c04" (UID: "fc3ee6cf-52bc-4586-b226-c40a74952c04"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.002599 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-nb\") pod \"fc3ee6cf-52bc-4586-b226-c40a74952c04\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.002652 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-swift-storage-0\") pod \"fc3ee6cf-52bc-4586-b226-c40a74952c04\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.002707 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-sb\") pod \"fc3ee6cf-52bc-4586-b226-c40a74952c04\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.002726 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-config\") pod \"fc3ee6cf-52bc-4586-b226-c40a74952c04\" (UID: \"fc3ee6cf-52bc-4586-b226-c40a74952c04\") " Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.003265 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnjbk\" (UniqueName: \"kubernetes.io/projected/fc3ee6cf-52bc-4586-b226-c40a74952c04-kube-api-access-fnjbk\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.003277 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.055004 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fc3ee6cf-52bc-4586-b226-c40a74952c04" (UID: "fc3ee6cf-52bc-4586-b226-c40a74952c04"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.068523 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fc3ee6cf-52bc-4586-b226-c40a74952c04" (UID: "fc3ee6cf-52bc-4586-b226-c40a74952c04"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.074168 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fc3ee6cf-52bc-4586-b226-c40a74952c04" (UID: "fc3ee6cf-52bc-4586-b226-c40a74952c04"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.082441 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-config" (OuterVolumeSpecName: "config") pod "fc3ee6cf-52bc-4586-b226-c40a74952c04" (UID: "fc3ee6cf-52bc-4586-b226-c40a74952c04"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.104842 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.105104 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.105175 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.105231 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3ee6cf-52bc-4586-b226-c40a74952c04-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.217331 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79dc84bdb7-4sx8m"] Oct 02 21:47:18 crc kubenswrapper[4636]: W1002 21:47:18.219963 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb21803db_480b_4261_a6d3_62042843c92f.slice/crio-cc899f4688a1ac45dd1612044053d0d38948b94746211946f14bf48e89170d36 WatchSource:0}: Error finding container cc899f4688a1ac45dd1612044053d0d38948b94746211946f14bf48e89170d36: Status 404 returned error can't find the container with id cc899f4688a1ac45dd1612044053d0d38948b94746211946f14bf48e89170d36 Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.459987 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" event={"ID":"fc3ee6cf-52bc-4586-b226-c40a74952c04","Type":"ContainerDied","Data":"5c63806479055899f66f62c86c1d0bdef2933cb283b0a5e494b3f8e0dc83e36b"} Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.460041 4636 scope.go:117] "RemoveContainer" containerID="0bea228f50f6510275aedb8d338944230df07f1579325930e830d9debb39fb4f" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.460053 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9khnk" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.462984 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" event={"ID":"b21803db-480b-4261-a6d3-62042843c92f","Type":"ContainerStarted","Data":"cc899f4688a1ac45dd1612044053d0d38948b94746211946f14bf48e89170d36"} Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.485318 4636 scope.go:117] "RemoveContainer" containerID="3e1cd06718fdecdaaddca6978d042f225d4e01b4325cdf818af2094c88197fcb" Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.503479 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9khnk"] Oct 02 21:47:18 crc kubenswrapper[4636]: I1002 21:47:18.542507 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9khnk"] Oct 02 21:47:19 crc kubenswrapper[4636]: I1002 21:47:19.477467 4636 generic.go:334] "Generic (PLEG): container finished" podID="b21803db-480b-4261-a6d3-62042843c92f" containerID="588ce2b93ae740229c27770d2e60ac35b8ca7df443ae0e39e5ae3d7750680855" exitCode=0 Oct 02 21:47:19 crc kubenswrapper[4636]: I1002 21:47:19.477526 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" event={"ID":"b21803db-480b-4261-a6d3-62042843c92f","Type":"ContainerDied","Data":"588ce2b93ae740229c27770d2e60ac35b8ca7df443ae0e39e5ae3d7750680855"} Oct 02 21:47:19 crc kubenswrapper[4636]: I1002 21:47:19.615136 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" path="/var/lib/kubelet/pods/fc3ee6cf-52bc-4586-b226-c40a74952c04/volumes" Oct 02 21:47:20 crc kubenswrapper[4636]: I1002 21:47:20.540798 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" event={"ID":"b21803db-480b-4261-a6d3-62042843c92f","Type":"ContainerStarted","Data":"ff82622c1634719af91f39544ad5a1fdb3c38d4ab1018b78b7544b0fcd903b85"} Oct 02 21:47:20 crc kubenswrapper[4636]: I1002 21:47:20.541074 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:20 crc kubenswrapper[4636]: I1002 21:47:20.556921 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" podStartSLOduration=3.5569025119999997 podStartE2EDuration="3.556902512s" podCreationTimestamp="2025-10-02 21:47:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:47:20.555413353 +0000 UTC m=+1431.878421382" watchObservedRunningTime="2025-10-02 21:47:20.556902512 +0000 UTC m=+1431.879910531" Oct 02 21:47:27 crc kubenswrapper[4636]: I1002 21:47:27.714885 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79dc84bdb7-4sx8m" Oct 02 21:47:27 crc kubenswrapper[4636]: I1002 21:47:27.790903 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pxq8v"] Oct 02 21:47:27 crc kubenswrapper[4636]: I1002 21:47:27.791146 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerName="dnsmasq-dns" containerID="cri-o://ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70" gracePeriod=10 Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.305536 4636 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.422841 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-nb\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.422913 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-sb\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.422970 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-config\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.423002 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2kz2\" (UniqueName: \"kubernetes.io/projected/26f39e78-df18-4c0d-9bf3-f29d11459ea9-kube-api-access-n2kz2\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.423066 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-swift-storage-0\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.423095 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-svc\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.423157 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-openstack-edpm-ipam\") pod \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\" (UID: \"26f39e78-df18-4c0d-9bf3-f29d11459ea9\") " Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.428226 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26f39e78-df18-4c0d-9bf3-f29d11459ea9-kube-api-access-n2kz2" (OuterVolumeSpecName: "kube-api-access-n2kz2") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "kube-api-access-n2kz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.502622 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.513288 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.515734 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.516695 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-config" (OuterVolumeSpecName: "config") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.518013 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.525503 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2kz2\" (UniqueName: \"kubernetes.io/projected/26f39e78-df18-4c0d-9bf3-f29d11459ea9-kube-api-access-n2kz2\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.525532 4636 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.525542 4636 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.525549 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.525558 4636 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.525565 4636 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-config\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.552093 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "26f39e78-df18-4c0d-9bf3-f29d11459ea9" (UID: "26f39e78-df18-4c0d-9bf3-f29d11459ea9"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.608984 4636 generic.go:334] "Generic (PLEG): container finished" podID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerID="ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70" exitCode=0 Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.609025 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" event={"ID":"26f39e78-df18-4c0d-9bf3-f29d11459ea9","Type":"ContainerDied","Data":"ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70"} Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.609049 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" event={"ID":"26f39e78-df18-4c0d-9bf3-f29d11459ea9","Type":"ContainerDied","Data":"d527e52270705f2dab38c66f7df41d9a6cab5a966bd58efe9c0e26e0850a43b8"} Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.609068 4636 scope.go:117] "RemoveContainer" containerID="ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.609507 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pxq8v" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.626991 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/26f39e78-df18-4c0d-9bf3-f29d11459ea9-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.629217 4636 scope.go:117] "RemoveContainer" containerID="10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.644214 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pxq8v"] Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.654121 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pxq8v"] Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.666328 4636 scope.go:117] "RemoveContainer" containerID="ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70" Oct 02 21:47:28 crc kubenswrapper[4636]: E1002 21:47:28.666769 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70\": container with ID starting with ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70 not found: ID does not exist" containerID="ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.666801 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70"} err="failed to get container status \"ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70\": rpc error: code = NotFound desc = could not find container \"ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70\": container with ID starting with 
ae29b9a9526e5cd347c3fd504a42c7c85ec23ab839ba155d90b7dd16ff80ca70 not found: ID does not exist" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.666821 4636 scope.go:117] "RemoveContainer" containerID="10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176" Oct 02 21:47:28 crc kubenswrapper[4636]: E1002 21:47:28.667057 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176\": container with ID starting with 10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176 not found: ID does not exist" containerID="10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176" Oct 02 21:47:28 crc kubenswrapper[4636]: I1002 21:47:28.667076 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176"} err="failed to get container status \"10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176\": rpc error: code = NotFound desc = could not find container \"10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176\": container with ID starting with 10502b9822ccdae5ca9e3a107849571fbf6d5e9f3844686f583a027f84fb5176 not found: ID does not exist" Oct 02 21:47:29 crc kubenswrapper[4636]: I1002 21:47:29.623832 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" path="/var/lib/kubelet/pods/26f39e78-df18-4c0d-9bf3-f29d11459ea9/volumes" Oct 02 21:47:38 crc kubenswrapper[4636]: I1002 21:47:38.706230 4636 generic.go:334] "Generic (PLEG): container finished" podID="379f5124-2304-4bd2-9765-757caebdd35a" containerID="36e80298b830f034e1a0b1bcdf2ff4ce6d947dd8f5a6427276d8d6fdaad82cbc" exitCode=0 Oct 02 21:47:38 crc kubenswrapper[4636]: I1002 21:47:38.706285 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"379f5124-2304-4bd2-9765-757caebdd35a","Type":"ContainerDied","Data":"36e80298b830f034e1a0b1bcdf2ff4ce6d947dd8f5a6427276d8d6fdaad82cbc"} Oct 02 21:47:39 crc kubenswrapper[4636]: I1002 21:47:39.720678 4636 generic.go:334] "Generic (PLEG): container finished" podID="8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73" containerID="33c237fa4fd9295527d77790f5ae55a45a81e0ab05ec0c5f5ff44470da589b0a" exitCode=0 Oct 02 21:47:39 crc kubenswrapper[4636]: I1002 21:47:39.720804 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73","Type":"ContainerDied","Data":"33c237fa4fd9295527d77790f5ae55a45a81e0ab05ec0c5f5ff44470da589b0a"} Oct 02 21:47:39 crc kubenswrapper[4636]: I1002 21:47:39.727313 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"379f5124-2304-4bd2-9765-757caebdd35a","Type":"ContainerStarted","Data":"d883dc05590efdda98179eb0670b96ae3ef2aa81ccc3d341b429a304d6b8681d"} Oct 02 21:47:39 crc kubenswrapper[4636]: I1002 21:47:39.728231 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 02 21:47:39 crc kubenswrapper[4636]: I1002 21:47:39.793788 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.793770273 podStartE2EDuration="36.793770273s" podCreationTimestamp="2025-10-02 21:47:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:47:39.783905184 +0000 UTC m=+1451.106913203" watchObservedRunningTime="2025-10-02 21:47:39.793770273 +0000 UTC m=+1451.116778292" Oct 02 21:47:40 crc kubenswrapper[4636]: I1002 21:47:40.748321 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73","Type":"ContainerStarted","Data":"5125e06025d66b16b711646b9271fc086bcd94e44e393e6dca41491afc16a393"} Oct 02 21:47:40 crc kubenswrapper[4636]: I1002 21:47:40.749467 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:47:40 crc kubenswrapper[4636]: I1002 21:47:40.781131 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.781109663 podStartE2EDuration="36.781109663s" podCreationTimestamp="2025-10-02 21:47:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 21:47:40.773271807 +0000 UTC m=+1452.096279846" watchObservedRunningTime="2025-10-02 21:47:40.781109663 +0000 UTC m=+1452.104117682" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.780812 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pphpp"] Oct 02 21:47:42 crc kubenswrapper[4636]: E1002 21:47:42.781633 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerName="init" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.781650 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerName="init" Oct 02 21:47:42 crc kubenswrapper[4636]: E1002 21:47:42.781671 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerName="dnsmasq-dns" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.781681 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerName="dnsmasq-dns" Oct 02 21:47:42 crc kubenswrapper[4636]: E1002 21:47:42.781700 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerName="init" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.781709 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerName="init" Oct 02 21:47:42 crc kubenswrapper[4636]: E1002 21:47:42.781723 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerName="dnsmasq-dns" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.781730 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerName="dnsmasq-dns" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.781979 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc3ee6cf-52bc-4586-b226-c40a74952c04" containerName="dnsmasq-dns" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.781996 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="26f39e78-df18-4c0d-9bf3-f29d11459ea9" containerName="dnsmasq-dns" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.783620 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.790883 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pphpp"] Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.894163 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-catalog-content\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.894301 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vqsq\" (UniqueName: \"kubernetes.io/projected/2feb7989-8345-421c-aad9-0ee360f0fe80-kube-api-access-9vqsq\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.894335 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-utilities\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.995863 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vqsq\" (UniqueName: \"kubernetes.io/projected/2feb7989-8345-421c-aad9-0ee360f0fe80-kube-api-access-9vqsq\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.995916 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-utilities\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.995993 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-catalog-content\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.996510 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-catalog-content\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:42 crc kubenswrapper[4636]: I1002 21:47:42.996681 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-utilities\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:43 crc kubenswrapper[4636]: I1002 21:47:43.025740 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9vqsq\" (UniqueName: \"kubernetes.io/projected/2feb7989-8345-421c-aad9-0ee360f0fe80-kube-api-access-9vqsq\") pod \"redhat-operators-pphpp\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:43 crc kubenswrapper[4636]: I1002 21:47:43.103665 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:43 crc kubenswrapper[4636]: I1002 21:47:43.597595 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pphpp"] Oct 02 21:47:43 crc kubenswrapper[4636]: W1002 21:47:43.608296 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2feb7989_8345_421c_aad9_0ee360f0fe80.slice/crio-495da92dc67f96e5cf2e6990ed1a4dce435164d7cedaba94e224ed28bd008ec6 WatchSource:0}: Error finding container 495da92dc67f96e5cf2e6990ed1a4dce435164d7cedaba94e224ed28bd008ec6: Status 404 returned error can't find the container with id 495da92dc67f96e5cf2e6990ed1a4dce435164d7cedaba94e224ed28bd008ec6 Oct 02 21:47:43 crc kubenswrapper[4636]: I1002 21:47:43.775808 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerStarted","Data":"495da92dc67f96e5cf2e6990ed1a4dce435164d7cedaba94e224ed28bd008ec6"} Oct 02 21:47:44 crc kubenswrapper[4636]: I1002 21:47:44.788240 4636 generic.go:334] "Generic (PLEG): container finished" podID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerID="dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9" exitCode=0 Oct 02 21:47:44 crc kubenswrapper[4636]: I1002 21:47:44.788672 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerDied","Data":"dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9"} Oct 02 21:47:45 crc kubenswrapper[4636]: I1002 21:47:45.804228 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerStarted","Data":"0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83"} Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.715492 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw"] Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.716626 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.720647 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.720854 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.721084 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.721203 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.745177 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw"] Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.765372 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.765673 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzb4l\" (UniqueName: \"kubernetes.io/projected/58767458-ad30-4ec0-aa42-0a6d9634d72d-kube-api-access-qzb4l\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.765708 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.765799 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.867467 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.867525 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzb4l\" (UniqueName: \"kubernetes.io/projected/58767458-ad30-4ec0-aa42-0a6d9634d72d-kube-api-access-qzb4l\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.867554 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.867653 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.873103 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.873561 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.881789 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:46 crc kubenswrapper[4636]: I1002 21:47:46.914093 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzb4l\" (UniqueName: \"kubernetes.io/projected/58767458-ad30-4ec0-aa42-0a6d9634d72d-kube-api-access-qzb4l\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:47 crc kubenswrapper[4636]: I1002 21:47:47.034212 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:47:47 crc kubenswrapper[4636]: I1002 21:47:47.741123 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw"] Oct 02 21:47:47 crc kubenswrapper[4636]: I1002 21:47:47.830195 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" event={"ID":"58767458-ad30-4ec0-aa42-0a6d9634d72d","Type":"ContainerStarted","Data":"514ca82e4f954e4c461841133b309b6c3249137f5b08f2655919d8b3759b9a0b"} Oct 02 21:47:49 crc kubenswrapper[4636]: I1002 21:47:49.859726 4636 generic.go:334] "Generic (PLEG): container finished" podID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerID="0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83" exitCode=0 Oct 02 21:47:49 crc kubenswrapper[4636]: I1002 21:47:49.859886 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerDied","Data":"0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83"} Oct 02 21:47:50 crc kubenswrapper[4636]: I1002 21:47:50.871719 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerStarted","Data":"1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd"} Oct 02 21:47:50 crc kubenswrapper[4636]: I1002 21:47:50.897670 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pphpp" podStartSLOduration=3.158063437 podStartE2EDuration="8.897650415s" podCreationTimestamp="2025-10-02 21:47:42 +0000 UTC" firstStartedPulling="2025-10-02 21:47:44.790028413 +0000 UTC m=+1456.113036432" lastFinishedPulling="2025-10-02 21:47:50.529615391 +0000 UTC m=+1461.852623410" observedRunningTime="2025-10-02 21:47:50.893734852 +0000 UTC m=+1462.216742891" watchObservedRunningTime="2025-10-02 21:47:50.897650415 +0000 UTC m=+1462.220658444" Oct 02 21:47:53 crc kubenswrapper[4636]: I1002 21:47:53.104559 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:53 crc kubenswrapper[4636]: I1002 21:47:53.105053 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:47:53 crc kubenswrapper[4636]: I1002 21:47:53.117135 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:47:53 crc kubenswrapper[4636]: I1002 21:47:53.117189 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:47:53 crc kubenswrapper[4636]: I1002 21:47:53.959972 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 02 21:47:54 crc kubenswrapper[4636]: I1002 21:47:54.159685 4636 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-marketplace/redhat-operators-pphpp" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" probeResult="failure" output=< Oct 02 21:47:54 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 21:47:54 crc kubenswrapper[4636]: > Oct 02 21:47:54 crc kubenswrapper[4636]: I1002 21:47:54.956929 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 02 21:48:02 crc kubenswrapper[4636]: I1002 21:48:02.994695 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" event={"ID":"58767458-ad30-4ec0-aa42-0a6d9634d72d","Type":"ContainerStarted","Data":"e7de6fdf8be431b37fe096d7aa2d1384960169eb933a37eb608bbf4e39a23c9e"} Oct 02 21:48:03 crc kubenswrapper[4636]: I1002 21:48:03.483589 4636 scope.go:117] "RemoveContainer" containerID="b46ff96ed1eda16567b38e61b3d8548936dfa92e2b86f859d0d44bc82bb6025c" Oct 02 21:48:04 crc kubenswrapper[4636]: I1002 21:48:04.152126 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pphpp" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" probeResult="failure" output=< Oct 02 21:48:04 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 21:48:04 crc kubenswrapper[4636]: > Oct 02 21:48:14 crc kubenswrapper[4636]: I1002 21:48:14.152814 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pphpp" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" probeResult="failure" output=< Oct 02 21:48:14 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 21:48:14 crc kubenswrapper[4636]: > Oct 02 21:48:15 crc kubenswrapper[4636]: I1002 21:48:15.104298 4636 generic.go:334] "Generic (PLEG): container finished" podID="58767458-ad30-4ec0-aa42-0a6d9634d72d" containerID="e7de6fdf8be431b37fe096d7aa2d1384960169eb933a37eb608bbf4e39a23c9e" exitCode=0 Oct 02 21:48:15 crc kubenswrapper[4636]: I1002 21:48:15.104378 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" event={"ID":"58767458-ad30-4ec0-aa42-0a6d9634d72d","Type":"ContainerDied","Data":"e7de6fdf8be431b37fe096d7aa2d1384960169eb933a37eb608bbf4e39a23c9e"} Oct 02 21:48:16 crc kubenswrapper[4636]: I1002 21:48:16.995262 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.139842 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" event={"ID":"58767458-ad30-4ec0-aa42-0a6d9634d72d","Type":"ContainerDied","Data":"514ca82e4f954e4c461841133b309b6c3249137f5b08f2655919d8b3759b9a0b"} Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.139896 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="514ca82e4f954e4c461841133b309b6c3249137f5b08f2655919d8b3759b9a0b" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.139973 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.147416 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-inventory\") pod \"58767458-ad30-4ec0-aa42-0a6d9634d72d\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.147514 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-ssh-key\") pod \"58767458-ad30-4ec0-aa42-0a6d9634d72d\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.147786 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzb4l\" (UniqueName: \"kubernetes.io/projected/58767458-ad30-4ec0-aa42-0a6d9634d72d-kube-api-access-qzb4l\") pod \"58767458-ad30-4ec0-aa42-0a6d9634d72d\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.147921 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-repo-setup-combined-ca-bundle\") pod \"58767458-ad30-4ec0-aa42-0a6d9634d72d\" (UID: \"58767458-ad30-4ec0-aa42-0a6d9634d72d\") " Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.182303 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "58767458-ad30-4ec0-aa42-0a6d9634d72d" (UID: "58767458-ad30-4ec0-aa42-0a6d9634d72d"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.182944 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58767458-ad30-4ec0-aa42-0a6d9634d72d-kube-api-access-qzb4l" (OuterVolumeSpecName: "kube-api-access-qzb4l") pod "58767458-ad30-4ec0-aa42-0a6d9634d72d" (UID: "58767458-ad30-4ec0-aa42-0a6d9634d72d"). InnerVolumeSpecName "kube-api-access-qzb4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.209729 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-inventory" (OuterVolumeSpecName: "inventory") pod "58767458-ad30-4ec0-aa42-0a6d9634d72d" (UID: "58767458-ad30-4ec0-aa42-0a6d9634d72d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.209803 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "58767458-ad30-4ec0-aa42-0a6d9634d72d" (UID: "58767458-ad30-4ec0-aa42-0a6d9634d72d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.225598 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh"] Oct 02 21:48:17 crc kubenswrapper[4636]: E1002 21:48:17.226153 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58767458-ad30-4ec0-aa42-0a6d9634d72d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.226177 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="58767458-ad30-4ec0-aa42-0a6d9634d72d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.226351 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="58767458-ad30-4ec0-aa42-0a6d9634d72d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.226987 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.234008 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh"] Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.262681 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.262784 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.262801 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzb4l\" (UniqueName: \"kubernetes.io/projected/58767458-ad30-4ec0-aa42-0a6d9634d72d-kube-api-access-qzb4l\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.262816 4636 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58767458-ad30-4ec0-aa42-0a6d9634d72d-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.364446 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.364513 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.364623 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kbkv\" (UniqueName: \"kubernetes.io/projected/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-kube-api-access-4kbkv\") 
pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.467466 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.467555 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.467670 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kbkv\" (UniqueName: \"kubernetes.io/projected/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-kube-api-access-4kbkv\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.471088 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.471090 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.485155 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kbkv\" (UniqueName: \"kubernetes.io/projected/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-kube-api-access-4kbkv\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tnvgh\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.616831 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:17 crc kubenswrapper[4636]: I1002 21:48:17.980711 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh"] Oct 02 21:48:18 crc kubenswrapper[4636]: I1002 21:48:18.150576 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" event={"ID":"4dd25cb2-7e73-449a-aa3b-2ff97702dad2","Type":"ContainerStarted","Data":"69090a3ff5f41d1f41df752ccfb137efa5823f28ca1432bd69ed7a0685836f26"} Oct 02 21:48:19 crc kubenswrapper[4636]: I1002 21:48:19.160375 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" event={"ID":"4dd25cb2-7e73-449a-aa3b-2ff97702dad2","Type":"ContainerStarted","Data":"34b75053c8c96eb7dc0a40c2cb45673497e2dbae4d06919b7bb6b7a418295643"} Oct 02 21:48:19 crc kubenswrapper[4636]: I1002 21:48:19.181307 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" podStartSLOduration=2.004586693 podStartE2EDuration="2.181287409s" podCreationTimestamp="2025-10-02 21:48:17 +0000 UTC" firstStartedPulling="2025-10-02 21:48:17.98582515 +0000 UTC m=+1489.308833169" lastFinishedPulling="2025-10-02 21:48:18.162525866 +0000 UTC m=+1489.485533885" observedRunningTime="2025-10-02 21:48:19.176910824 +0000 UTC m=+1490.499918843" watchObservedRunningTime="2025-10-02 21:48:19.181287409 +0000 UTC m=+1490.504295428" Oct 02 21:48:21 crc kubenswrapper[4636]: I1002 21:48:21.179110 4636 generic.go:334] "Generic (PLEG): container finished" podID="4dd25cb2-7e73-449a-aa3b-2ff97702dad2" containerID="34b75053c8c96eb7dc0a40c2cb45673497e2dbae4d06919b7bb6b7a418295643" exitCode=0 Oct 02 21:48:21 crc kubenswrapper[4636]: I1002 21:48:21.179197 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" event={"ID":"4dd25cb2-7e73-449a-aa3b-2ff97702dad2","Type":"ContainerDied","Data":"34b75053c8c96eb7dc0a40c2cb45673497e2dbae4d06919b7bb6b7a418295643"} Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.577852 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.672634 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-inventory\") pod \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.672672 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-ssh-key\") pod \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.672707 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kbkv\" (UniqueName: \"kubernetes.io/projected/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-kube-api-access-4kbkv\") pod \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\" (UID: \"4dd25cb2-7e73-449a-aa3b-2ff97702dad2\") " Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.681114 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-kube-api-access-4kbkv" (OuterVolumeSpecName: "kube-api-access-4kbkv") pod "4dd25cb2-7e73-449a-aa3b-2ff97702dad2" (UID: "4dd25cb2-7e73-449a-aa3b-2ff97702dad2"). InnerVolumeSpecName "kube-api-access-4kbkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.701626 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-inventory" (OuterVolumeSpecName: "inventory") pod "4dd25cb2-7e73-449a-aa3b-2ff97702dad2" (UID: "4dd25cb2-7e73-449a-aa3b-2ff97702dad2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.705457 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4dd25cb2-7e73-449a-aa3b-2ff97702dad2" (UID: "4dd25cb2-7e73-449a-aa3b-2ff97702dad2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.776530 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.776560 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:22 crc kubenswrapper[4636]: I1002 21:48:22.776570 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kbkv\" (UniqueName: \"kubernetes.io/projected/4dd25cb2-7e73-449a-aa3b-2ff97702dad2-kube-api-access-4kbkv\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.117939 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.118403 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.157369 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.225795 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.230259 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.231594 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tnvgh" event={"ID":"4dd25cb2-7e73-449a-aa3b-2ff97702dad2","Type":"ContainerDied","Data":"69090a3ff5f41d1f41df752ccfb137efa5823f28ca1432bd69ed7a0685836f26"} Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.231646 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69090a3ff5f41d1f41df752ccfb137efa5823f28ca1432bd69ed7a0685836f26" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.305325 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn"] Oct 02 21:48:23 crc kubenswrapper[4636]: E1002 21:48:23.305823 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dd25cb2-7e73-449a-aa3b-2ff97702dad2" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.305867 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dd25cb2-7e73-449a-aa3b-2ff97702dad2" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.306093 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dd25cb2-7e73-449a-aa3b-2ff97702dad2" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.306778 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.309871 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.309977 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.310220 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.310356 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.324113 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn"] Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.389895 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.389994 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc 
kubenswrapper[4636]: I1002 21:48:23.390029 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.394091 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dfv5\" (UniqueName: \"kubernetes.io/projected/66588ffc-e8a9-4ced-a324-c9d436880e52-kube-api-access-6dfv5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.402436 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pphpp"] Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.496588 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.497228 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dfv5\" (UniqueName: \"kubernetes.io/projected/66588ffc-e8a9-4ced-a324-c9d436880e52-kube-api-access-6dfv5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.497539 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.497801 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.501210 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.509687 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" 
(UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.509682 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.516713 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dfv5\" (UniqueName: \"kubernetes.io/projected/66588ffc-e8a9-4ced-a324-c9d436880e52-kube-api-access-6dfv5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:23 crc kubenswrapper[4636]: I1002 21:48:23.634498 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.162685 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn"] Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.238232 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pphpp" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" containerID="cri-o://1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd" gracePeriod=2 Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.238800 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" event={"ID":"66588ffc-e8a9-4ced-a324-c9d436880e52","Type":"ContainerStarted","Data":"5e3ae3025e727ec22a97153cefb9e5df4a298e79ba954baf4c901a06da26a9b9"} Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.676415 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.825947 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-catalog-content\") pod \"2feb7989-8345-421c-aad9-0ee360f0fe80\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.826357 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vqsq\" (UniqueName: \"kubernetes.io/projected/2feb7989-8345-421c-aad9-0ee360f0fe80-kube-api-access-9vqsq\") pod \"2feb7989-8345-421c-aad9-0ee360f0fe80\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.826447 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-utilities\") pod \"2feb7989-8345-421c-aad9-0ee360f0fe80\" (UID: \"2feb7989-8345-421c-aad9-0ee360f0fe80\") " Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.827468 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-utilities" (OuterVolumeSpecName: "utilities") pod "2feb7989-8345-421c-aad9-0ee360f0fe80" (UID: "2feb7989-8345-421c-aad9-0ee360f0fe80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.836468 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2feb7989-8345-421c-aad9-0ee360f0fe80-kube-api-access-9vqsq" (OuterVolumeSpecName: "kube-api-access-9vqsq") pod "2feb7989-8345-421c-aad9-0ee360f0fe80" (UID: "2feb7989-8345-421c-aad9-0ee360f0fe80"). InnerVolumeSpecName "kube-api-access-9vqsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.921123 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2feb7989-8345-421c-aad9-0ee360f0fe80" (UID: "2feb7989-8345-421c-aad9-0ee360f0fe80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.928627 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.928658 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vqsq\" (UniqueName: \"kubernetes.io/projected/2feb7989-8345-421c-aad9-0ee360f0fe80-kube-api-access-9vqsq\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:24 crc kubenswrapper[4636]: I1002 21:48:24.928670 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feb7989-8345-421c-aad9-0ee360f0fe80-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.254736 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" event={"ID":"66588ffc-e8a9-4ced-a324-c9d436880e52","Type":"ContainerStarted","Data":"cbe70435eabea61bac2b751104015a1236d017477e90c063a3a50330dd341ed5"} Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.259372 4636 generic.go:334] "Generic (PLEG): container finished" podID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerID="1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd" exitCode=0 Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.259429 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerDied","Data":"1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd"} Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.259627 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pphpp" event={"ID":"2feb7989-8345-421c-aad9-0ee360f0fe80","Type":"ContainerDied","Data":"495da92dc67f96e5cf2e6990ed1a4dce435164d7cedaba94e224ed28bd008ec6"} Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.259736 4636 scope.go:117] "RemoveContainer" containerID="1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.259487 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pphpp" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.280344 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" podStartSLOduration=2.099514504 podStartE2EDuration="2.280322776s" podCreationTimestamp="2025-10-02 21:48:23 +0000 UTC" firstStartedPulling="2025-10-02 21:48:24.162380321 +0000 UTC m=+1495.485388340" lastFinishedPulling="2025-10-02 21:48:24.343188593 +0000 UTC m=+1495.666196612" observedRunningTime="2025-10-02 21:48:25.27017931 +0000 UTC m=+1496.593187339" watchObservedRunningTime="2025-10-02 21:48:25.280322776 +0000 UTC m=+1496.603330805" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.303997 4636 scope.go:117] "RemoveContainer" containerID="0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.314083 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pphpp"] Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.324693 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pphpp"] Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.328994 4636 scope.go:117] "RemoveContainer" containerID="dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.371008 4636 scope.go:117] "RemoveContainer" containerID="1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd" Oct 02 21:48:25 crc kubenswrapper[4636]: E1002 21:48:25.371395 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd\": container with ID starting with 1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd not found: ID does not exist" containerID="1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.371434 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd"} err="failed to get container status \"1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd\": rpc error: code = NotFound desc = could not find container \"1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd\": container with ID starting with 1e294573af008df1d8a4c1ccb9d721f1b81bdec0c1bde2d7ee5d09bdb899d8bd not found: ID does not exist" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.371460 4636 scope.go:117] "RemoveContainer" containerID="0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83" Oct 02 21:48:25 crc kubenswrapper[4636]: E1002 21:48:25.371744 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83\": container with ID starting with 0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83 not found: ID does not exist" containerID="0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.371792 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83"} err="failed to get 
container status \"0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83\": rpc error: code = NotFound desc = could not find container \"0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83\": container with ID starting with 0493b5570bba77e13362e89bf24f89c67013873fc51522636a53c266136dcc83 not found: ID does not exist" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.371818 4636 scope.go:117] "RemoveContainer" containerID="dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9" Oct 02 21:48:25 crc kubenswrapper[4636]: E1002 21:48:25.373488 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9\": container with ID starting with dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9 not found: ID does not exist" containerID="dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.373515 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9"} err="failed to get container status \"dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9\": rpc error: code = NotFound desc = could not find container \"dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9\": container with ID starting with dee52b8d15946066c47b3f0ec20bfcf6361f040e7052ef65d651a4631829c8b9 not found: ID does not exist" Oct 02 21:48:25 crc kubenswrapper[4636]: I1002 21:48:25.617607 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" path="/var/lib/kubelet/pods/2feb7989-8345-421c-aad9-0ee360f0fe80/volumes" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.620508 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rldxs"] Oct 02 21:48:26 crc kubenswrapper[4636]: E1002 21:48:26.623527 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="extract-content" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.623564 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="extract-content" Oct 02 21:48:26 crc kubenswrapper[4636]: E1002 21:48:26.623580 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.623589 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" Oct 02 21:48:26 crc kubenswrapper[4636]: E1002 21:48:26.623625 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="extract-utilities" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.623636 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="extract-utilities" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.623932 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2feb7989-8345-421c-aad9-0ee360f0fe80" containerName="registry-server" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.626186 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.634860 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rldxs"] Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.778235 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-catalog-content\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.778279 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-utilities\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.778322 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhxzb\" (UniqueName: \"kubernetes.io/projected/b4da00e7-ba41-41c2-acb7-38cf099662fc-kube-api-access-lhxzb\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.880650 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-catalog-content\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.880710 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-utilities\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.880772 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhxzb\" (UniqueName: \"kubernetes.io/projected/b4da00e7-ba41-41c2-acb7-38cf099662fc-kube-api-access-lhxzb\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.881207 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-utilities\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.881232 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-catalog-content\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.911692 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lhxzb\" (UniqueName: \"kubernetes.io/projected/b4da00e7-ba41-41c2-acb7-38cf099662fc-kube-api-access-lhxzb\") pod \"community-operators-rldxs\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:26 crc kubenswrapper[4636]: I1002 21:48:26.950456 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:27 crc kubenswrapper[4636]: I1002 21:48:27.390355 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rldxs"] Oct 02 21:48:28 crc kubenswrapper[4636]: I1002 21:48:28.302282 4636 generic.go:334] "Generic (PLEG): container finished" podID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerID="956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31" exitCode=0 Oct 02 21:48:28 crc kubenswrapper[4636]: I1002 21:48:28.302395 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerDied","Data":"956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31"} Oct 02 21:48:28 crc kubenswrapper[4636]: I1002 21:48:28.303451 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerStarted","Data":"12343e2d5e5a113083a3209ff59141bcf5d7c80898e7f45e64323dd2ed5551b9"} Oct 02 21:48:30 crc kubenswrapper[4636]: I1002 21:48:30.324555 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerStarted","Data":"2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146"} Oct 02 21:48:31 crc kubenswrapper[4636]: I1002 21:48:31.335564 4636 generic.go:334] "Generic (PLEG): container finished" podID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerID="2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146" exitCode=0 Oct 02 21:48:31 crc kubenswrapper[4636]: I1002 21:48:31.335612 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerDied","Data":"2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146"} Oct 02 21:48:32 crc kubenswrapper[4636]: I1002 21:48:32.347320 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerStarted","Data":"27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e"} Oct 02 21:48:32 crc kubenswrapper[4636]: I1002 21:48:32.371629 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rldxs" podStartSLOduration=2.94475126 podStartE2EDuration="6.371605941s" podCreationTimestamp="2025-10-02 21:48:26 +0000 UTC" firstStartedPulling="2025-10-02 21:48:28.304425413 +0000 UTC m=+1499.627433432" lastFinishedPulling="2025-10-02 21:48:31.731280094 +0000 UTC m=+1503.054288113" observedRunningTime="2025-10-02 21:48:32.36432006 +0000 UTC m=+1503.687328099" watchObservedRunningTime="2025-10-02 21:48:32.371605941 +0000 UTC m=+1503.694613960" Oct 02 21:48:36 crc kubenswrapper[4636]: I1002 21:48:36.950892 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:36 crc kubenswrapper[4636]: I1002 21:48:36.951411 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:37 crc kubenswrapper[4636]: I1002 21:48:37.005459 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:37 crc kubenswrapper[4636]: I1002 21:48:37.439131 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:37 crc kubenswrapper[4636]: I1002 21:48:37.482031 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rldxs"] Oct 02 21:48:39 crc kubenswrapper[4636]: I1002 21:48:39.407297 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rldxs" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="registry-server" containerID="cri-o://27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e" gracePeriod=2 Oct 02 21:48:39 crc kubenswrapper[4636]: I1002 21:48:39.884986 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.023159 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-catalog-content\") pod \"b4da00e7-ba41-41c2-acb7-38cf099662fc\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.023269 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-utilities\") pod \"b4da00e7-ba41-41c2-acb7-38cf099662fc\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.023396 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhxzb\" (UniqueName: \"kubernetes.io/projected/b4da00e7-ba41-41c2-acb7-38cf099662fc-kube-api-access-lhxzb\") pod \"b4da00e7-ba41-41c2-acb7-38cf099662fc\" (UID: \"b4da00e7-ba41-41c2-acb7-38cf099662fc\") " Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.024862 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-utilities" (OuterVolumeSpecName: "utilities") pod "b4da00e7-ba41-41c2-acb7-38cf099662fc" (UID: "b4da00e7-ba41-41c2-acb7-38cf099662fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.031514 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4da00e7-ba41-41c2-acb7-38cf099662fc-kube-api-access-lhxzb" (OuterVolumeSpecName: "kube-api-access-lhxzb") pod "b4da00e7-ba41-41c2-acb7-38cf099662fc" (UID: "b4da00e7-ba41-41c2-acb7-38cf099662fc"). InnerVolumeSpecName "kube-api-access-lhxzb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.075922 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4da00e7-ba41-41c2-acb7-38cf099662fc" (UID: "b4da00e7-ba41-41c2-acb7-38cf099662fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.127311 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.127347 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhxzb\" (UniqueName: \"kubernetes.io/projected/b4da00e7-ba41-41c2-acb7-38cf099662fc-kube-api-access-lhxzb\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.127359 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4da00e7-ba41-41c2-acb7-38cf099662fc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.428951 4636 generic.go:334] "Generic (PLEG): container finished" podID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerID="27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e" exitCode=0 Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.429224 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerDied","Data":"27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e"} Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.429250 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rldxs" event={"ID":"b4da00e7-ba41-41c2-acb7-38cf099662fc","Type":"ContainerDied","Data":"12343e2d5e5a113083a3209ff59141bcf5d7c80898e7f45e64323dd2ed5551b9"} Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.429267 4636 scope.go:117] "RemoveContainer" containerID="27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.429384 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rldxs" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.481605 4636 scope.go:117] "RemoveContainer" containerID="2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.501816 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rldxs"] Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.526168 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rldxs"] Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.577916 4636 scope.go:117] "RemoveContainer" containerID="956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.628897 4636 scope.go:117] "RemoveContainer" containerID="27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e" Oct 02 21:48:40 crc kubenswrapper[4636]: E1002 21:48:40.632954 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e\": container with ID starting with 27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e not found: ID does not exist" containerID="27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.632993 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e"} err="failed to get container status \"27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e\": rpc error: code = NotFound desc = could not find container \"27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e\": container with ID starting with 27ea5663f6347d1d8b2aed3cdfdfdff039dfb97ec50b5b72175f5ed46fe1033e not found: ID does not exist" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.633018 4636 scope.go:117] "RemoveContainer" containerID="2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146" Oct 02 21:48:40 crc kubenswrapper[4636]: E1002 21:48:40.640111 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146\": container with ID starting with 2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146 not found: ID does not exist" containerID="2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.640151 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146"} err="failed to get container status \"2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146\": rpc error: code = NotFound desc = could not find container \"2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146\": container with ID starting with 2a6d55eb01bfc5ed3f8f44d15d9fa774e6a72d01e590e1a2852767a2033ef146 not found: ID does not exist" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.640177 4636 scope.go:117] "RemoveContainer" containerID="956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31" Oct 02 21:48:40 crc kubenswrapper[4636]: E1002 21:48:40.641382 4636 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31\": container with ID starting with 956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31 not found: ID does not exist" containerID="956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31" Oct 02 21:48:40 crc kubenswrapper[4636]: I1002 21:48:40.641414 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31"} err="failed to get container status \"956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31\": rpc error: code = NotFound desc = could not find container \"956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31\": container with ID starting with 956d0a50043397b966aa7f592257af7bde4410a278287daf05f5d5c80ce1bf31 not found: ID does not exist" Oct 02 21:48:41 crc kubenswrapper[4636]: I1002 21:48:41.614933 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" path="/var/lib/kubelet/pods/b4da00e7-ba41-41c2-acb7-38cf099662fc/volumes" Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.117580 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.118228 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.118289 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.119311 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.119382 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" gracePeriod=600 Oct 02 21:48:53 crc kubenswrapper[4636]: E1002 21:48:53.242646 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.548841 4636 generic.go:334] 
"Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" exitCode=0 Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.548887 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"} Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.548953 4636 scope.go:117] "RemoveContainer" containerID="8eca633a881fe1c5c0ea771b3040511454688e9d05e626e17792bacf3c7ae736" Oct 02 21:48:53 crc kubenswrapper[4636]: I1002 21:48:53.549685 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:48:53 crc kubenswrapper[4636]: E1002 21:48:53.550042 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:49:03 crc kubenswrapper[4636]: I1002 21:49:03.598079 4636 scope.go:117] "RemoveContainer" containerID="9b016d3fc43aa0958607cf0a682fe15b4abc5a7ec5976eeb632384423e28e86b" Oct 02 21:49:08 crc kubenswrapper[4636]: I1002 21:49:08.604796 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:49:08 crc kubenswrapper[4636]: E1002 21:49:08.605830 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.523922 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mtfxk"] Oct 02 21:49:11 crc kubenswrapper[4636]: E1002 21:49:11.525060 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="extract-utilities" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.525160 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="extract-utilities" Oct 02 21:49:11 crc kubenswrapper[4636]: E1002 21:49:11.525199 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="registry-server" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.525213 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="registry-server" Oct 02 21:49:11 crc kubenswrapper[4636]: E1002 21:49:11.525261 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="extract-content" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.525276 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" 
containerName="extract-content" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.525827 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4da00e7-ba41-41c2-acb7-38cf099662fc" containerName="registry-server" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.528574 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.536141 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mtfxk"] Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.628679 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87npg\" (UniqueName: \"kubernetes.io/projected/526f107a-7e75-492d-a0f8-79b4af330743-kube-api-access-87npg\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.628774 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-catalog-content\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.628828 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-utilities\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.730569 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87npg\" (UniqueName: \"kubernetes.io/projected/526f107a-7e75-492d-a0f8-79b4af330743-kube-api-access-87npg\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.730648 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-catalog-content\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.730712 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-utilities\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.731238 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-utilities\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.731463 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-catalog-content\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.754731 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87npg\" (UniqueName: \"kubernetes.io/projected/526f107a-7e75-492d-a0f8-79b4af330743-kube-api-access-87npg\") pod \"redhat-marketplace-mtfxk\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:11 crc kubenswrapper[4636]: I1002 21:49:11.865472 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:12 crc kubenswrapper[4636]: I1002 21:49:12.370453 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mtfxk"] Oct 02 21:49:12 crc kubenswrapper[4636]: I1002 21:49:12.755896 4636 generic.go:334] "Generic (PLEG): container finished" podID="526f107a-7e75-492d-a0f8-79b4af330743" containerID="b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368" exitCode=0 Oct 02 21:49:12 crc kubenswrapper[4636]: I1002 21:49:12.756087 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerDied","Data":"b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368"} Oct 02 21:49:12 crc kubenswrapper[4636]: I1002 21:49:12.756294 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerStarted","Data":"5665e8456c84de122532c3dbc79fc2d425cce32f11d1c1b75bead5fbae7217ae"} Oct 02 21:49:13 crc kubenswrapper[4636]: I1002 21:49:13.768786 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerStarted","Data":"18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1"} Oct 02 21:49:14 crc kubenswrapper[4636]: I1002 21:49:14.785482 4636 generic.go:334] "Generic (PLEG): container finished" podID="526f107a-7e75-492d-a0f8-79b4af330743" containerID="18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1" exitCode=0 Oct 02 21:49:14 crc kubenswrapper[4636]: I1002 21:49:14.785578 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerDied","Data":"18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1"} Oct 02 21:49:15 crc kubenswrapper[4636]: I1002 21:49:15.796624 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerStarted","Data":"3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71"} Oct 02 21:49:15 crc kubenswrapper[4636]: I1002 21:49:15.815021 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mtfxk" podStartSLOduration=2.244954671 podStartE2EDuration="4.815002698s" podCreationTimestamp="2025-10-02 21:49:11 +0000 UTC" firstStartedPulling="2025-10-02 21:49:12.75845315 +0000 UTC m=+1544.081461209" lastFinishedPulling="2025-10-02 
21:49:15.328501197 +0000 UTC m=+1546.651509236" observedRunningTime="2025-10-02 21:49:15.813212471 +0000 UTC m=+1547.136220490" watchObservedRunningTime="2025-10-02 21:49:15.815002698 +0000 UTC m=+1547.138010717" Oct 02 21:49:20 crc kubenswrapper[4636]: I1002 21:49:20.604311 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:49:20 crc kubenswrapper[4636]: E1002 21:49:20.605499 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:49:21 crc kubenswrapper[4636]: I1002 21:49:21.865670 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:21 crc kubenswrapper[4636]: I1002 21:49:21.866946 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:21 crc kubenswrapper[4636]: I1002 21:49:21.916241 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:22 crc kubenswrapper[4636]: I1002 21:49:22.928575 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:23 crc kubenswrapper[4636]: I1002 21:49:23.004992 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mtfxk"] Oct 02 21:49:24 crc kubenswrapper[4636]: I1002 21:49:24.889237 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mtfxk" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="registry-server" containerID="cri-o://3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71" gracePeriod=2 Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.316480 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.499630 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-utilities\") pod \"526f107a-7e75-492d-a0f8-79b4af330743\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.499890 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-catalog-content\") pod \"526f107a-7e75-492d-a0f8-79b4af330743\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.500467 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-utilities" (OuterVolumeSpecName: "utilities") pod "526f107a-7e75-492d-a0f8-79b4af330743" (UID: "526f107a-7e75-492d-a0f8-79b4af330743"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.500886 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87npg\" (UniqueName: \"kubernetes.io/projected/526f107a-7e75-492d-a0f8-79b4af330743-kube-api-access-87npg\") pod \"526f107a-7e75-492d-a0f8-79b4af330743\" (UID: \"526f107a-7e75-492d-a0f8-79b4af330743\") " Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.501858 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.507089 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/526f107a-7e75-492d-a0f8-79b4af330743-kube-api-access-87npg" (OuterVolumeSpecName: "kube-api-access-87npg") pod "526f107a-7e75-492d-a0f8-79b4af330743" (UID: "526f107a-7e75-492d-a0f8-79b4af330743"). InnerVolumeSpecName "kube-api-access-87npg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.514154 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "526f107a-7e75-492d-a0f8-79b4af330743" (UID: "526f107a-7e75-492d-a0f8-79b4af330743"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.603079 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/526f107a-7e75-492d-a0f8-79b4af330743-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.603114 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87npg\" (UniqueName: \"kubernetes.io/projected/526f107a-7e75-492d-a0f8-79b4af330743-kube-api-access-87npg\") on node \"crc\" DevicePath \"\"" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.902443 4636 generic.go:334] "Generic (PLEG): container finished" podID="526f107a-7e75-492d-a0f8-79b4af330743" containerID="3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71" exitCode=0 Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.902507 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mtfxk" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.902503 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerDied","Data":"3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71"} Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.902576 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mtfxk" event={"ID":"526f107a-7e75-492d-a0f8-79b4af330743","Type":"ContainerDied","Data":"5665e8456c84de122532c3dbc79fc2d425cce32f11d1c1b75bead5fbae7217ae"} Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.902601 4636 scope.go:117] "RemoveContainer" containerID="3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.928964 4636 scope.go:117] "RemoveContainer" containerID="18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.932164 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mtfxk"] Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.961314 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mtfxk"] Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.968687 4636 scope.go:117] "RemoveContainer" containerID="b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.998934 4636 scope.go:117] "RemoveContainer" containerID="3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71" Oct 02 21:49:25 crc kubenswrapper[4636]: E1002 21:49:25.999436 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71\": container with ID starting with 3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71 not found: ID does not exist" containerID="3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.999478 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71"} err="failed to get container status \"3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71\": rpc error: code = NotFound desc = could not find container \"3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71\": container with ID starting with 3a7ee7fe425ab3f78d10fe41042c735b87214b34511a85bf6f3332d738437f71 not found: ID does not exist" Oct 02 21:49:25 crc kubenswrapper[4636]: I1002 21:49:25.999506 4636 scope.go:117] "RemoveContainer" containerID="18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1" Oct 02 21:49:26 crc kubenswrapper[4636]: E1002 21:49:25.999986 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1\": container with ID starting with 18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1 not found: ID does not exist" containerID="18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1" Oct 02 21:49:26 crc kubenswrapper[4636]: I1002 21:49:26.000085 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1"} err="failed to get container status \"18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1\": rpc error: code = NotFound desc = could not find container \"18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1\": container with ID starting with 18483edec3fa4eab784f5736eb8697672b411641b78ea0dcbcfcb9fb15963cc1 not found: ID does not exist" Oct 02 21:49:26 crc kubenswrapper[4636]: I1002 21:49:26.000186 4636 scope.go:117] "RemoveContainer" containerID="b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368" Oct 02 21:49:26 crc kubenswrapper[4636]: E1002 21:49:26.000593 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368\": container with ID starting with b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368 not found: ID does not exist" containerID="b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368" Oct 02 21:49:26 crc kubenswrapper[4636]: I1002 21:49:26.000619 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368"} err="failed to get container status \"b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368\": rpc error: code = NotFound desc = could not find container \"b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368\": container with ID starting with b5506b5d9c81b80f41e4f2da46261ce1a1d5b2db2895ac0844370a9d8589d368 not found: ID does not exist" Oct 02 21:49:27 crc kubenswrapper[4636]: I1002 21:49:27.613680 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="526f107a-7e75-492d-a0f8-79b4af330743" path="/var/lib/kubelet/pods/526f107a-7e75-492d-a0f8-79b4af330743/volumes" Oct 02 21:49:35 crc kubenswrapper[4636]: I1002 21:49:35.603162 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:49:35 crc kubenswrapper[4636]: E1002 21:49:35.603905 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:49:50 crc kubenswrapper[4636]: I1002 21:49:50.604432 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:49:50 crc kubenswrapper[4636]: E1002 21:49:50.605318 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:50:03 crc kubenswrapper[4636]: I1002 21:50:03.604114 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:50:03 crc 
kubenswrapper[4636]: E1002 21:50:03.604994 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:50:03 crc kubenswrapper[4636]: I1002 21:50:03.703284 4636 scope.go:117] "RemoveContainer" containerID="650af3b5000135c170b1d7f4acdfba6ab8650ff85e59e1fef2a428b5847f6329"
Oct 02 21:50:03 crc kubenswrapper[4636]: I1002 21:50:03.726350 4636 scope.go:117] "RemoveContainer" containerID="07fb6f09ed97bc60ae26587a5b976539cf0b8a2adc8ad857173c97f33c24d2b2"
Oct 02 21:50:18 crc kubenswrapper[4636]: I1002 21:50:18.603524 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:50:18 crc kubenswrapper[4636]: E1002 21:50:18.604396 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:50:33 crc kubenswrapper[4636]: I1002 21:50:33.603977 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:50:33 crc kubenswrapper[4636]: E1002 21:50:33.604837 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.874442 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vghpv"]
Oct 02 21:50:41 crc kubenswrapper[4636]: E1002 21:50:41.875560 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="extract-utilities"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.875577 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="extract-utilities"
Oct 02 21:50:41 crc kubenswrapper[4636]: E1002 21:50:41.875613 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="extract-content"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.875623 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="extract-content"
Oct 02 21:50:41 crc kubenswrapper[4636]: E1002 21:50:41.875876 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="registry-server"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.875888 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="registry-server"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.876170 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="526f107a-7e75-492d-a0f8-79b4af330743" containerName="registry-server"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.878360 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:41 crc kubenswrapper[4636]: I1002 21:50:41.882610 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vghpv"]
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.009097 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffswd\" (UniqueName: \"kubernetes.io/projected/4091c567-ad11-4cb4-8cc4-901a917fe15b-kube-api-access-ffswd\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.009239 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-catalog-content\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.009287 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-utilities\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.111351 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-catalog-content\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.111419 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-utilities\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.111550 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffswd\" (UniqueName: \"kubernetes.io/projected/4091c567-ad11-4cb4-8cc4-901a917fe15b-kube-api-access-ffswd\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.111987 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-utilities\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.112076 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-catalog-content\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.131003 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffswd\" (UniqueName: \"kubernetes.io/projected/4091c567-ad11-4cb4-8cc4-901a917fe15b-kube-api-access-ffswd\") pod \"certified-operators-vghpv\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") " pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.215140 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:42 crc kubenswrapper[4636]: I1002 21:50:42.711778 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vghpv"]
Oct 02 21:50:43 crc kubenswrapper[4636]: I1002 21:50:43.656606 4636 generic.go:334] "Generic (PLEG): container finished" podID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerID="2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe" exitCode=0
Oct 02 21:50:43 crc kubenswrapper[4636]: I1002 21:50:43.656700 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerDied","Data":"2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe"}
Oct 02 21:50:43 crc kubenswrapper[4636]: I1002 21:50:43.656933 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerStarted","Data":"f444869e3a2f90e43a66dced79e8cc4bff61a4843253a72f4f44023753c182cc"}
Oct 02 21:50:44 crc kubenswrapper[4636]: I1002 21:50:44.668414 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerStarted","Data":"44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f"}
Oct 02 21:50:45 crc kubenswrapper[4636]: I1002 21:50:45.604382 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:50:45 crc kubenswrapper[4636]: E1002 21:50:45.605112 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:50:45 crc kubenswrapper[4636]: I1002 21:50:45.678004 4636 generic.go:334] "Generic (PLEG): container finished" podID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerID="44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f" exitCode=0
Oct 02 21:50:45 crc kubenswrapper[4636]: I1002 21:50:45.678051 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerDied","Data":"44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f"}
Oct 02 21:50:46 crc kubenswrapper[4636]: I1002 21:50:46.688981 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerStarted","Data":"f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234"}
Oct 02 21:50:46 crc kubenswrapper[4636]: I1002 21:50:46.716934 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vghpv" podStartSLOduration=3.300438554 podStartE2EDuration="5.716913038s" podCreationTimestamp="2025-10-02 21:50:41 +0000 UTC" firstStartedPulling="2025-10-02 21:50:43.658158569 +0000 UTC m=+1634.981166588" lastFinishedPulling="2025-10-02 21:50:46.074633053 +0000 UTC m=+1637.397641072" observedRunningTime="2025-10-02 21:50:46.712664245 +0000 UTC m=+1638.035672264" watchObservedRunningTime="2025-10-02 21:50:46.716913038 +0000 UTC m=+1638.039921067"
Oct 02 21:50:52 crc kubenswrapper[4636]: I1002 21:50:52.215941 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:52 crc kubenswrapper[4636]: I1002 21:50:52.216500 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:52 crc kubenswrapper[4636]: I1002 21:50:52.283549 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:52 crc kubenswrapper[4636]: I1002 21:50:52.786238 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:52 crc kubenswrapper[4636]: I1002 21:50:52.843586 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vghpv"]
Oct 02 21:50:54 crc kubenswrapper[4636]: I1002 21:50:54.773470 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vghpv" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="registry-server" containerID="cri-o://f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234" gracePeriod=2
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.195561 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.361279 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-utilities\") pod \"4091c567-ad11-4cb4-8cc4-901a917fe15b\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") "
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.361388 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffswd\" (UniqueName: \"kubernetes.io/projected/4091c567-ad11-4cb4-8cc4-901a917fe15b-kube-api-access-ffswd\") pod \"4091c567-ad11-4cb4-8cc4-901a917fe15b\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") "
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.361465 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-catalog-content\") pod \"4091c567-ad11-4cb4-8cc4-901a917fe15b\" (UID: \"4091c567-ad11-4cb4-8cc4-901a917fe15b\") "
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.363711 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-utilities" (OuterVolumeSpecName: "utilities") pod "4091c567-ad11-4cb4-8cc4-901a917fe15b" (UID: "4091c567-ad11-4cb4-8cc4-901a917fe15b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.368992 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4091c567-ad11-4cb4-8cc4-901a917fe15b-kube-api-access-ffswd" (OuterVolumeSpecName: "kube-api-access-ffswd") pod "4091c567-ad11-4cb4-8cc4-901a917fe15b" (UID: "4091c567-ad11-4cb4-8cc4-901a917fe15b"). InnerVolumeSpecName "kube-api-access-ffswd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.403985 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4091c567-ad11-4cb4-8cc4-901a917fe15b" (UID: "4091c567-ad11-4cb4-8cc4-901a917fe15b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.463660 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.463690 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffswd\" (UniqueName: \"kubernetes.io/projected/4091c567-ad11-4cb4-8cc4-901a917fe15b-kube-api-access-ffswd\") on node \"crc\" DevicePath \"\""
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.463699 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4091c567-ad11-4cb4-8cc4-901a917fe15b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.782724 4636 generic.go:334] "Generic (PLEG): container finished" podID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerID="f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234" exitCode=0
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.782794 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerDied","Data":"f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234"}
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.782852 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vghpv" event={"ID":"4091c567-ad11-4cb4-8cc4-901a917fe15b","Type":"ContainerDied","Data":"f444869e3a2f90e43a66dced79e8cc4bff61a4843253a72f4f44023753c182cc"}
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.782877 4636 scope.go:117] "RemoveContainer" containerID="f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.782808 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vghpv"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.802984 4636 scope.go:117] "RemoveContainer" containerID="44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.814680 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vghpv"]
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.823982 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vghpv"]
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.826300 4636 scope.go:117] "RemoveContainer" containerID="2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.881908 4636 scope.go:117] "RemoveContainer" containerID="f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234"
Oct 02 21:50:55 crc kubenswrapper[4636]: E1002 21:50:55.882366 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234\": container with ID starting with f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234 not found: ID does not exist" containerID="f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.882464 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234"} err="failed to get container status \"f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234\": rpc error: code = NotFound desc = could not find container \"f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234\": container with ID starting with f6dac991aa167d06c8f18180294f71426c7f36c5c3abcaee84f5d6f9aac50234 not found: ID does not exist"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.882496 4636 scope.go:117] "RemoveContainer" containerID="44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f"
Oct 02 21:50:55 crc kubenswrapper[4636]: E1002 21:50:55.883057 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f\": container with ID starting with 44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f not found: ID does not exist" containerID="44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.883110 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f"} err="failed to get container status \"44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f\": rpc error: code = NotFound desc = could not find container \"44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f\": container with ID starting with 44550ecdd2a41a40e9d9789b4caca84a20205aa4d33bdcc5ccbbe4b34e70e44f not found: ID does not exist"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.883142 4636 scope.go:117] "RemoveContainer" containerID="2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe"
Oct 02 21:50:55 crc kubenswrapper[4636]: E1002 21:50:55.883546 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe\": container with ID starting with 2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe not found: ID does not exist" containerID="2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe"
Oct 02 21:50:55 crc kubenswrapper[4636]: I1002 21:50:55.883577 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe"} err="failed to get container status \"2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe\": rpc error: code = NotFound desc = could not find container \"2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe\": container with ID starting with 2d44b0fdacba0ff73cb5af8d9cf02cdfeb303e5d3f5238efc78059e6a0186efe not found: ID does not exist"
Oct 02 21:50:56 crc kubenswrapper[4636]: I1002 21:50:56.604433 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:50:56 crc kubenswrapper[4636]: E1002 21:50:56.604878 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:50:57 crc kubenswrapper[4636]: I1002 21:50:57.613095 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" path="/var/lib/kubelet/pods/4091c567-ad11-4cb4-8cc4-901a917fe15b/volumes"
Oct 02 21:51:11 crc kubenswrapper[4636]: I1002 21:51:11.604547 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:51:11 crc kubenswrapper[4636]: E1002 21:51:11.605310 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:51:24 crc kubenswrapper[4636]: I1002 21:51:24.603534 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:51:24 crc kubenswrapper[4636]: E1002 21:51:24.604282 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:51:33 crc kubenswrapper[4636]: I1002 21:51:33.140039 4636 generic.go:334] "Generic (PLEG): container finished" podID="66588ffc-e8a9-4ced-a324-c9d436880e52" containerID="cbe70435eabea61bac2b751104015a1236d017477e90c063a3a50330dd341ed5" exitCode=0
Oct 02 21:51:33 crc kubenswrapper[4636]: I1002 21:51:33.140119 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" event={"ID":"66588ffc-e8a9-4ced-a324-c9d436880e52","Type":"ContainerDied","Data":"cbe70435eabea61bac2b751104015a1236d017477e90c063a3a50330dd341ed5"}
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.519726 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn"
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.601815 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-ssh-key\") pod \"66588ffc-e8a9-4ced-a324-c9d436880e52\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") "
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.602060 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-inventory\") pod \"66588ffc-e8a9-4ced-a324-c9d436880e52\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") "
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.602107 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dfv5\" (UniqueName: \"kubernetes.io/projected/66588ffc-e8a9-4ced-a324-c9d436880e52-kube-api-access-6dfv5\") pod \"66588ffc-e8a9-4ced-a324-c9d436880e52\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") "
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.602146 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-bootstrap-combined-ca-bundle\") pod \"66588ffc-e8a9-4ced-a324-c9d436880e52\" (UID: \"66588ffc-e8a9-4ced-a324-c9d436880e52\") "
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.607972 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66588ffc-e8a9-4ced-a324-c9d436880e52-kube-api-access-6dfv5" (OuterVolumeSpecName: "kube-api-access-6dfv5") pod "66588ffc-e8a9-4ced-a324-c9d436880e52" (UID: "66588ffc-e8a9-4ced-a324-c9d436880e52"). InnerVolumeSpecName "kube-api-access-6dfv5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.609089 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "66588ffc-e8a9-4ced-a324-c9d436880e52" (UID: "66588ffc-e8a9-4ced-a324-c9d436880e52"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.630937 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-inventory" (OuterVolumeSpecName: "inventory") pod "66588ffc-e8a9-4ced-a324-c9d436880e52" (UID: "66588ffc-e8a9-4ced-a324-c9d436880e52"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.633572 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "66588ffc-e8a9-4ced-a324-c9d436880e52" (UID: "66588ffc-e8a9-4ced-a324-c9d436880e52"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.705616 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-inventory\") on node \"crc\" DevicePath \"\""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.705645 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dfv5\" (UniqueName: \"kubernetes.io/projected/66588ffc-e8a9-4ced-a324-c9d436880e52-kube-api-access-6dfv5\") on node \"crc\" DevicePath \"\""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.705657 4636 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 02 21:51:34 crc kubenswrapper[4636]: I1002 21:51:34.705667 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66588ffc-e8a9-4ced-a324-c9d436880e52-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.166174 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn" event={"ID":"66588ffc-e8a9-4ced-a324-c9d436880e52","Type":"ContainerDied","Data":"5e3ae3025e727ec22a97153cefb9e5df4a298e79ba954baf4c901a06da26a9b9"}
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.166213 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e3ae3025e727ec22a97153cefb9e5df4a298e79ba954baf4c901a06da26a9b9"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.166248 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.253958 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"]
Oct 02 21:51:35 crc kubenswrapper[4636]: E1002 21:51:35.254397 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="extract-utilities"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.254413 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="extract-utilities"
Oct 02 21:51:35 crc kubenswrapper[4636]: E1002 21:51:35.254443 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="extract-content"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.254450 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="extract-content"
Oct 02 21:51:35 crc kubenswrapper[4636]: E1002 21:51:35.254465 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66588ffc-e8a9-4ced-a324-c9d436880e52" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.254473 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="66588ffc-e8a9-4ced-a324-c9d436880e52" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 02 21:51:35 crc kubenswrapper[4636]: E1002 21:51:35.254488 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="registry-server"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.254494 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="registry-server"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.254663 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="66588ffc-e8a9-4ced-a324-c9d436880e52" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.254683 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4091c567-ad11-4cb4-8cc4-901a917fe15b" containerName="registry-server"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.255325 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.261121 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.262272 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.262630 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.262845 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.273071 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"]
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.318824 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpjw2\" (UniqueName: \"kubernetes.io/projected/49804afe-9b10-4aef-bd85-414372732d36-kube-api-access-bpjw2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.318903 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.319363 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.420971 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.421032 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpjw2\" (UniqueName: \"kubernetes.io/projected/49804afe-9b10-4aef-bd85-414372732d36-kube-api-access-bpjw2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.421071 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.426865 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.427873 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.441338 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpjw2\" (UniqueName: \"kubernetes.io/projected/49804afe-9b10-4aef-bd85-414372732d36-kube-api-access-bpjw2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:35 crc kubenswrapper[4636]: I1002 21:51:35.576155 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:51:36 crc kubenswrapper[4636]: I1002 21:51:36.105053 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"]
Oct 02 21:51:36 crc kubenswrapper[4636]: I1002 21:51:36.109212 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 02 21:51:36 crc kubenswrapper[4636]: I1002 21:51:36.176670 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb" event={"ID":"49804afe-9b10-4aef-bd85-414372732d36","Type":"ContainerStarted","Data":"590572e95617d32a13d4a24e405226ab2820b9f7045196fae49199fb25000584"}
Oct 02 21:51:36 crc kubenswrapper[4636]: I1002 21:51:36.604681 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:51:36 crc kubenswrapper[4636]: E1002 21:51:36.604910 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:51:37 crc kubenswrapper[4636]: I1002 21:51:37.188809 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb" event={"ID":"49804afe-9b10-4aef-bd85-414372732d36","Type":"ContainerStarted","Data":"4b024df0c9b195b52c048165af8a3bdb9e77dc477f8c81178ef1962f13119b55"}
Oct 02 21:51:37 crc kubenswrapper[4636]: I1002 21:51:37.217511 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb" podStartSLOduration=2.042402976 podStartE2EDuration="2.217482859s" podCreationTimestamp="2025-10-02 21:51:35 +0000 UTC" firstStartedPulling="2025-10-02 21:51:36.108824823 +0000 UTC m=+1687.431832862" lastFinishedPulling="2025-10-02 21:51:36.283904726 +0000 UTC m=+1687.606912745" observedRunningTime="2025-10-02 21:51:37.203551338 +0000 UTC m=+1688.526559437" watchObservedRunningTime="2025-10-02 21:51:37.217482859 +0000 UTC m=+1688.540490918"
Oct 02 21:51:47 crc kubenswrapper[4636]: I1002 21:51:47.604158 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:51:47 crc kubenswrapper[4636]: E1002 21:51:47.605823 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:51:48 crc kubenswrapper[4636]: I1002 21:51:48.039314 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-p47cm"]
Oct 02 21:51:48 crc kubenswrapper[4636]: I1002 21:51:48.050560 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-p47cm"]
Oct 02 21:51:49 crc kubenswrapper[4636]: I1002 21:51:49.618132 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51" path="/var/lib/kubelet/pods/8d9d8da8-f6fb-46f1-a5a3-ef86c92f1f51/volumes"
Oct 02 21:51:52 crc kubenswrapper[4636]: I1002 21:51:52.041102 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-g29vt"]
Oct 02 21:51:52 crc kubenswrapper[4636]: I1002 21:51:52.058631 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-g29vt"]
Oct 02 21:51:52 crc kubenswrapper[4636]: I1002 21:51:52.067822 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-hspb6"]
Oct 02 21:51:52 crc kubenswrapper[4636]: I1002 21:51:52.074462 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-hspb6"]
Oct 02 21:51:53 crc kubenswrapper[4636]: I1002 21:51:53.613113 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0000f181-00c3-43d4-aead-d064250a0099" path="/var/lib/kubelet/pods/0000f181-00c3-43d4-aead-d064250a0099/volumes"
Oct 02 21:51:53 crc kubenswrapper[4636]: I1002 21:51:53.614625 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6de6aa77-aec2-4d73-8bfb-3aed342368e3" path="/var/lib/kubelet/pods/6de6aa77-aec2-4d73-8bfb-3aed342368e3/volumes"
Oct 02 21:51:58 crc kubenswrapper[4636]: I1002 21:51:58.025816 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-c6ae-account-create-x7vrb"]
Oct 02 21:51:58 crc kubenswrapper[4636]: I1002 21:51:58.033323 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-c6ae-account-create-x7vrb"]
Oct 02 21:51:58 crc kubenswrapper[4636]: I1002 21:51:58.604512 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:51:58 crc kubenswrapper[4636]: E1002 21:51:58.604775 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:51:59 crc kubenswrapper[4636]: I1002 21:51:59.646375 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3eecdab-0886-4fa2-8e2b-29a5894cca5d" path="/var/lib/kubelet/pods/e3eecdab-0886-4fa2-8e2b-29a5894cca5d/volumes"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.031642 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b879-account-create-2k959"]
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.040693 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9ede-account-create-8c85t"]
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.050324 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b879-account-create-2k959"]
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.059006 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9ede-account-create-8c85t"]
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.615015 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389" path="/var/lib/kubelet/pods/b7c0f3df-7c2a-4ee3-8aa2-04dc2b2b8389/volumes"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.616103 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3946d1d-bde7-4a87-995b-090a8d334f3e" path="/var/lib/kubelet/pods/d3946d1d-bde7-4a87-995b-090a8d334f3e/volumes"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.834958 4636 scope.go:117] "RemoveContainer" containerID="9f2bdc2dac0842e30cb4b34728f2a5fdd78cd49c99a261a805013e9309e2142d"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.865829 4636 scope.go:117] "RemoveContainer" containerID="518ecb3cc6381d451823ee69e3aaf95454ff825101bad8d3008f25b0a482ca75"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.891585 4636 scope.go:117] "RemoveContainer" containerID="f5e34c92b6228decccc445d6577c52c3362e34ac837de2e2b31811c1c5beb30b"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.935490 4636 scope.go:117] "RemoveContainer" containerID="185139f1ef47be28d7af755b107c10df3c76f9cdefb00e14bb91a80113e151c9"
Oct 02 21:52:03 crc kubenswrapper[4636]: I1002 21:52:03.978077 4636 scope.go:117] "RemoveContainer" containerID="173c919ae9f78fcd2f64c671a8b49049e46edb1c5b992fdf85a5bd790cd4cf3d"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.019078 4636 scope.go:117] "RemoveContainer" containerID="8fdc7a3e01a9df48562d32c4a04a5e9f66d109b66f674cd13d81b202472e7b31"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.040682 4636 scope.go:117] "RemoveContainer" containerID="e92dcfa6399d7043d9c0ce16bf21fc0985ea81b084b58efda8e3710843025f65"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.097137 4636 scope.go:117] "RemoveContainer" containerID="e2cd635d5616db67ebb9dc34cbd6403e0c029f6b4b51859033f990b05bd01f9e"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.117706 4636 scope.go:117] "RemoveContainer" containerID="a56550ac6397281b00be251df4fcfa74ba96fe76a4506429eaf65e5d38b3959d"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.138061 4636 scope.go:117] "RemoveContainer" containerID="13bb5e8e03cf1edf800c443ee030a7b7f7bfa19bae6b0242e1737552ab945c28"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.160034 4636 scope.go:117] "RemoveContainer" containerID="a47c4b54de34c13631f67bc8eaea0fd22d84829568527c0d680f1bd8b324af3e"
Oct 02 21:52:04 crc kubenswrapper[4636]: I1002 21:52:04.199728 4636 scope.go:117] "RemoveContainer" containerID="9a4e3d8c8c2f66e0e4f98a9960e2f41c3e51934298856b32328857618c25a64b"
Oct 02 21:52:11 crc kubenswrapper[4636]: I1002 21:52:11.603834 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:52:11 crc kubenswrapper[4636]: E1002 21:52:11.604528 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:52:22 crc kubenswrapper[4636]: I1002 21:52:22.039616 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-wzgzt"]
Oct 02 21:52:22 crc kubenswrapper[4636]: I1002 21:52:22.048339 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-f96c4"]
Oct 02 21:52:22 crc kubenswrapper[4636]: I1002 21:52:22.056141 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-zfgnq"]
Oct 02 21:52:22 crc kubenswrapper[4636]: I1002 21:52:22.064650 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-wzgzt"]
Oct 02 21:52:22 crc kubenswrapper[4636]: I1002 21:52:22.072478 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-f96c4"]
Oct 02 21:52:22 crc kubenswrapper[4636]: I1002 21:52:22.079195 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-zfgnq"]
Oct 02 21:52:23 crc kubenswrapper[4636]: I1002 21:52:23.604002 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:52:23 crc kubenswrapper[4636]: E1002 21:52:23.604574 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:52:23 crc kubenswrapper[4636]: I1002 21:52:23.622611 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb5e7d3-f883-4823-b53f-4fc2abd1e716" path="/var/lib/kubelet/pods/7bb5e7d3-f883-4823-b53f-4fc2abd1e716/volumes"
Oct 02 21:52:23 crc kubenswrapper[4636]: I1002 21:52:23.623530 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eef44f3-49f5-40c6-b160-a72304983227" path="/var/lib/kubelet/pods/8eef44f3-49f5-40c6-b160-a72304983227/volumes"
Oct 02 21:52:23 crc kubenswrapper[4636]: I1002 21:52:23.625183 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4a90155-455b-4a0f-b579-98d7a261db7b" path="/var/lib/kubelet/pods/a4a90155-455b-4a0f-b579-98d7a261db7b/volumes"
Oct 02 21:52:28 crc kubenswrapper[4636]: I1002 21:52:28.046440 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qdzwb"]
Oct 02 21:52:28 crc kubenswrapper[4636]: I1002 21:52:28.059100 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qdzwb"]
Oct 02 21:52:29 crc kubenswrapper[4636]: I1002 21:52:29.615026 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="928272a1-ab4f-42e0-9fa1-64228b77271f" path="/var/lib/kubelet/pods/928272a1-ab4f-42e0-9fa1-64228b77271f/volumes"
Oct 02 21:52:33 crc kubenswrapper[4636]: I1002 21:52:33.024851 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1ea2-account-create-hkf65"]
Oct 02 21:52:33 crc kubenswrapper[4636]: I1002 21:52:33.031712 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1ea2-account-create-hkf65"]
Oct 02 21:52:33 crc kubenswrapper[4636]: I1002 21:52:33.618371 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac128b50-2394-48cd-85c8-8dd21d7faca5" path="/var/lib/kubelet/pods/ac128b50-2394-48cd-85c8-8dd21d7faca5/volumes"
Oct 02 21:52:34 crc kubenswrapper[4636]: I1002 21:52:34.024620 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-3a73-account-create-btbnb"]
Oct 02 21:52:34 crc kubenswrapper[4636]: I1002 21:52:34.033857 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2a44-account-create-t8qgl"]
Oct 02 21:52:34 crc kubenswrapper[4636]: I1002 21:52:34.044322 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-3a73-account-create-btbnb"]
Oct 02 21:52:34 crc kubenswrapper[4636]: I1002 21:52:34.050839 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2a44-account-create-t8qgl"]
Oct 02 21:52:35 crc kubenswrapper[4636]: I1002 21:52:35.616135 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc3b476-645a-40d8-a4ba-293d7b39acf9" path="/var/lib/kubelet/pods/4dc3b476-645a-40d8-a4ba-293d7b39acf9/volumes"
Oct 02 21:52:35 crc kubenswrapper[4636]: I1002 21:52:35.617304 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb93326f-3e16-453a-8b7d-1b5fb6e4b56d" path="/var/lib/kubelet/pods/eb93326f-3e16-453a-8b7d-1b5fb6e4b56d/volumes"
Oct 02 21:52:38 crc kubenswrapper[4636]: I1002 21:52:38.603604 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:52:38 crc kubenswrapper[4636]: E1002 21:52:38.604157 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:52:49 crc kubenswrapper[4636]: I1002 21:52:49.038733 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-qgkxv"]
Oct 02 21:52:49 crc kubenswrapper[4636]: I1002 21:52:49.045700 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-qgkxv"]
Oct 02 21:52:49 crc kubenswrapper[4636]: I1002 21:52:49.619991 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7bc59e-3c13-4a51-9494-d45734d6c70c" path="/var/lib/kubelet/pods/4f7bc59e-3c13-4a51-9494-d45734d6c70c/volumes"
Oct 02 21:52:52 crc kubenswrapper[4636]: I1002 21:52:52.603341 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:52:52 crc kubenswrapper[4636]: E1002 21:52:52.603931 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.333818 4636 scope.go:117] "RemoveContainer" containerID="544fdf35921ef685679cdff969b03ce3801a0c4b324d781f1abbdd79aba66d96"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.365049 4636 scope.go:117] "RemoveContainer" containerID="b9eaab03bcb68309b8cdd662f894fa051226141d46fba51f25ff62d2ba2cb015"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.412108 4636 scope.go:117] "RemoveContainer" containerID="13815fad88a054bdb5cd40a674bbba40fe7eb211011e2f6dc28cc25214b5c7ff"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.449261 4636 scope.go:117] "RemoveContainer" containerID="03dddc28db964c86107328fbde314727a15bc6a5c6f25bb6566323c5d48c2629"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.493630 4636 scope.go:117] "RemoveContainer" containerID="89309f63b4977f58f0aad4acb842e8d4b758f25aa59d05b6f836880482b59440"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.536126 4636 scope.go:117] "RemoveContainer" containerID="06b2fc9f60d0d941269d5abd3a4041dd72c08360ffa03a7e4178ea8e99f2025a"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.574547 4636 scope.go:117] "RemoveContainer" containerID="b562cf48f873543cebea199ecdc361a425df054bff5946872490509fe7c68916"
Oct 02 21:53:04 crc kubenswrapper[4636]: I1002 21:53:04.610431 4636 scope.go:117] "RemoveContainer" containerID="a6549a0ceb46475aa404cc1ffe2cdae3622f15964572528f4deea0159a5a6562"
Oct 02 21:53:05 crc kubenswrapper[4636]: I1002 21:53:05.603486 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:53:05 crc kubenswrapper[4636]: E1002 21:53:05.604136 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:53:09 crc kubenswrapper[4636]: I1002 21:53:09.031203 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-cf9ld"]
Oct 02 21:53:09 crc kubenswrapper[4636]: I1002 21:53:09.040224 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-cf9ld"]
Oct 02 21:53:09 crc kubenswrapper[4636]: I1002 21:53:09.615081 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70ce2186-3a61-4f36-a51b-52a7bfeabdf1" path="/var/lib/kubelet/pods/70ce2186-3a61-4f36-a51b-52a7bfeabdf1/volumes"
Oct 02 21:53:13 crc kubenswrapper[4636]: I1002 21:53:13.102879 4636 generic.go:334] "Generic (PLEG): container finished" podID="49804afe-9b10-4aef-bd85-414372732d36" containerID="4b024df0c9b195b52c048165af8a3bdb9e77dc477f8c81178ef1962f13119b55" exitCode=0
Oct 02 21:53:13 crc kubenswrapper[4636]: I1002 21:53:13.103016 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb" event={"ID":"49804afe-9b10-4aef-bd85-414372732d36","Type":"ContainerDied","Data":"4b024df0c9b195b52c048165af8a3bdb9e77dc477f8c81178ef1962f13119b55"}
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.033684 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-q5lqg"]
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.041199 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-q5lqg"]
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.528455 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.666030 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-ssh-key\") pod \"49804afe-9b10-4aef-bd85-414372732d36\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") "
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.666145 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpjw2\" (UniqueName: \"kubernetes.io/projected/49804afe-9b10-4aef-bd85-414372732d36-kube-api-access-bpjw2\") pod \"49804afe-9b10-4aef-bd85-414372732d36\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") "
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.666242 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-inventory\") pod \"49804afe-9b10-4aef-bd85-414372732d36\" (UID: \"49804afe-9b10-4aef-bd85-414372732d36\") "
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.671948 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49804afe-9b10-4aef-bd85-414372732d36-kube-api-access-bpjw2" (OuterVolumeSpecName: "kube-api-access-bpjw2") pod "49804afe-9b10-4aef-bd85-414372732d36" (UID: "49804afe-9b10-4aef-bd85-414372732d36"). InnerVolumeSpecName "kube-api-access-bpjw2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.694851 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-inventory" (OuterVolumeSpecName: "inventory") pod "49804afe-9b10-4aef-bd85-414372732d36" (UID: "49804afe-9b10-4aef-bd85-414372732d36"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.699054 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49804afe-9b10-4aef-bd85-414372732d36" (UID: "49804afe-9b10-4aef-bd85-414372732d36"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.768076 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.768101 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpjw2\" (UniqueName: \"kubernetes.io/projected/49804afe-9b10-4aef-bd85-414372732d36-kube-api-access-bpjw2\") on node \"crc\" DevicePath \"\""
Oct 02 21:53:14 crc kubenswrapper[4636]: I1002 21:53:14.768115 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49804afe-9b10-4aef-bd85-414372732d36-inventory\") on node \"crc\" DevicePath \"\""
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.129969 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb" event={"ID":"49804afe-9b10-4aef-bd85-414372732d36","Type":"ContainerDied","Data":"590572e95617d32a13d4a24e405226ab2820b9f7045196fae49199fb25000584"}
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.130006 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="590572e95617d32a13d4a24e405226ab2820b9f7045196fae49199fb25000584"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.130057 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.232834 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"]
Oct 02 21:53:15 crc kubenswrapper[4636]: E1002 21:53:15.233218 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49804afe-9b10-4aef-bd85-414372732d36" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.233235 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="49804afe-9b10-4aef-bd85-414372732d36" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.233445 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="49804afe-9b10-4aef-bd85-414372732d36" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.233995 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.236514 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.239320 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.240835 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.249834 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.257758 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"]
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.379503 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.379553 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.379821 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgbfd\" (UniqueName: \"kubernetes.io/projected/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-kube-api-access-bgbfd\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.481647 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.481958 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.482063 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgbfd\" (UniqueName: \"kubernetes.io/projected/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-kube-api-access-bgbfd\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.489499 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.489828 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.501393 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgbfd\" (UniqueName: \"kubernetes.io/projected/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-kube-api-access-bgbfd\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.547969 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"
Oct 02 21:53:15 crc kubenswrapper[4636]: I1002 21:53:15.616714 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6" path="/var/lib/kubelet/pods/d0cdc7cc-5de5-40f5-ac17-12c4be5a74d6/volumes"
Oct 02 21:53:16 crc kubenswrapper[4636]: I1002 21:53:16.129591 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz"]
Oct 02 21:53:17 crc kubenswrapper[4636]: I1002 21:53:17.149225 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" event={"ID":"7345fa48-35b0-41e9-ae50-3920e8a1a6f5","Type":"ContainerStarted","Data":"2f6785a9c41db7c38d9ca9b9310fed2bc00ea6801a37dede613113d3fe55813f"}
Oct 02 21:53:17 crc kubenswrapper[4636]: I1002 21:53:17.149287 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" event={"ID":"7345fa48-35b0-41e9-ae50-3920e8a1a6f5","Type":"ContainerStarted","Data":"1acd4d7b6c78907df440efc8378afdb1b2c00b1c6e52927ab7fab2c32db421e5"}
Oct 02 21:53:17 crc kubenswrapper[4636]: I1002 21:53:17.177066 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" podStartSLOduration=1.982998483 podStartE2EDuration="2.177047401s" podCreationTimestamp="2025-10-02 21:53:15 +0000 UTC" firstStartedPulling="2025-10-02 21:53:16.140500275 +0000 UTC m=+1787.463508294" lastFinishedPulling="2025-10-02 21:53:16.334549193 +0000 UTC m=+1787.657557212" observedRunningTime="2025-10-02 21:53:17.170870986 +0000 UTC m=+1788.493879015" watchObservedRunningTime="2025-10-02 21:53:17.177047401 +0000 UTC m=+1788.500055420"
Oct 02 21:53:17 crc kubenswrapper[4636]: I1002 21:53:17.604721 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:53:17 crc kubenswrapper[4636]: E1002 21:53:17.605101 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:53:29 crc kubenswrapper[4636]: I1002 21:53:29.044029 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-llj5j"]
Oct 02 21:53:29 crc kubenswrapper[4636]: I1002 21:53:29.051081 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-7fdh9"]
Oct 02 21:53:29 crc kubenswrapper[4636]: I1002 21:53:29.058299 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-7fdh9"]
Oct 02 21:53:29 crc kubenswrapper[4636]: I1002 21:53:29.068158 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-llj5j"]
Oct 02 21:53:29 crc kubenswrapper[4636]: I1002 21:53:29.623179 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11ba2a28-597f-4252-8922-6360d60a5c81" path="/var/lib/kubelet/pods/11ba2a28-597f-4252-8922-6360d60a5c81/volumes"
Oct 02 21:53:29 crc kubenswrapper[4636]: I1002 21:53:29.624415 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e7e09e-0db9-4149-83d9-80163c11d203" path="/var/lib/kubelet/pods/e2e7e09e-0db9-4149-83d9-80163c11d203/volumes"
Oct 02 21:53:32 crc kubenswrapper[4636]: I1002 21:53:32.604155 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:53:32 crc kubenswrapper[4636]: E1002 21:53:32.604909 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 21:53:37 crc kubenswrapper[4636]: I1002 21:53:37.031850 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4vrl8"]
Oct 02 21:53:37 crc kubenswrapper[4636]: I1002 21:53:37.047096 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4vrl8"]
Oct 02 21:53:37 crc kubenswrapper[4636]: I1002 21:53:37.618788 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b773903-9fdb-4fdd-97b5-1c89103b3a0b" path="/var/lib/kubelet/pods/1b773903-9fdb-4fdd-97b5-1c89103b3a0b/volumes"
Oct 02 21:53:44 crc kubenswrapper[4636]: I1002 21:53:44.604191 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416"
Oct 02 21:53:44 crc kubenswrapper[4636]: E1002 21:53:44.604893 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon
pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 21:53:59 crc kubenswrapper[4636]: I1002 21:53:59.621582 4636 scope.go:117] "RemoveContainer" containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:54:00 crc kubenswrapper[4636]: I1002 21:54:00.578580 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"c6da1ee0722f89363964853239734f7c0beeeafaa1f6842cc16f9a803dfcc9e2"} Oct 02 21:54:04 crc kubenswrapper[4636]: I1002 21:54:04.786477 4636 scope.go:117] "RemoveContainer" containerID="f08c8397ab1d0020668ad7833d6b91aa201402ed5b8bad0c4b48d72bc7d2e423" Oct 02 21:54:04 crc kubenswrapper[4636]: I1002 21:54:04.829657 4636 scope.go:117] "RemoveContainer" containerID="f30e5ff61f2780092fb590bcce015ccbde7c6ea6989eb15df76fa5a86d18e767" Oct 02 21:54:04 crc kubenswrapper[4636]: I1002 21:54:04.918606 4636 scope.go:117] "RemoveContainer" containerID="42666bb860e39a1ed1b61416944660e0cfd3c3b77ab86eebc77bdf16462548fd" Oct 02 21:54:04 crc kubenswrapper[4636]: I1002 21:54:04.960954 4636 scope.go:117] "RemoveContainer" containerID="3a47d6e81f4218b0278f11546129bb57cc53d52b9eddd4ed6e77b9d1679eaf9a" Oct 02 21:54:05 crc kubenswrapper[4636]: I1002 21:54:05.003062 4636 scope.go:117] "RemoveContainer" containerID="12dd581a1b23a750e6aa553ae7e249bdb4fe3c8e3ec50624de5dd861d380ca16" Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.054681 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-q9x4w"] Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.067890 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-4zfc2"] Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.077613 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-vbvdn"] Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.090036 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-q9x4w"] Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.096347 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-vbvdn"] Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.102465 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-4zfc2"] Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.902209 4636 generic.go:334] "Generic (PLEG): container finished" podID="7345fa48-35b0-41e9-ae50-3920e8a1a6f5" containerID="2f6785a9c41db7c38d9ca9b9310fed2bc00ea6801a37dede613113d3fe55813f" exitCode=0 Oct 02 21:54:32 crc kubenswrapper[4636]: I1002 21:54:32.902258 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" event={"ID":"7345fa48-35b0-41e9-ae50-3920e8a1a6f5","Type":"ContainerDied","Data":"2f6785a9c41db7c38d9ca9b9310fed2bc00ea6801a37dede613113d3fe55813f"} Oct 02 21:54:33 crc kubenswrapper[4636]: I1002 21:54:33.616646 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01304c15-3331-4085-8769-2620145a308a" path="/var/lib/kubelet/pods/01304c15-3331-4085-8769-2620145a308a/volumes" Oct 02 21:54:33 crc kubenswrapper[4636]: I1002 21:54:33.619246 4636 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="68852986-2dd4-43e8-ac5d-acbe811855ca" path="/var/lib/kubelet/pods/68852986-2dd4-43e8-ac5d-acbe811855ca/volumes" Oct 02 21:54:33 crc kubenswrapper[4636]: I1002 21:54:33.620374 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8e71d83-78a0-4b7f-b759-c36325da0561" path="/var/lib/kubelet/pods/b8e71d83-78a0-4b7f-b759-c36325da0561/volumes" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.389187 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.513159 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgbfd\" (UniqueName: \"kubernetes.io/projected/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-kube-api-access-bgbfd\") pod \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.513258 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-inventory\") pod \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.513313 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-ssh-key\") pod \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\" (UID: \"7345fa48-35b0-41e9-ae50-3920e8a1a6f5\") " Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.540533 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-kube-api-access-bgbfd" (OuterVolumeSpecName: "kube-api-access-bgbfd") pod "7345fa48-35b0-41e9-ae50-3920e8a1a6f5" (UID: "7345fa48-35b0-41e9-ae50-3920e8a1a6f5"). InnerVolumeSpecName "kube-api-access-bgbfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.563907 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-inventory" (OuterVolumeSpecName: "inventory") pod "7345fa48-35b0-41e9-ae50-3920e8a1a6f5" (UID: "7345fa48-35b0-41e9-ae50-3920e8a1a6f5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.564013 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7345fa48-35b0-41e9-ae50-3920e8a1a6f5" (UID: "7345fa48-35b0-41e9-ae50-3920e8a1a6f5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.615454 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.615495 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.615507 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgbfd\" (UniqueName: \"kubernetes.io/projected/7345fa48-35b0-41e9-ae50-3920e8a1a6f5-kube-api-access-bgbfd\") on node \"crc\" DevicePath \"\"" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.922208 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" event={"ID":"7345fa48-35b0-41e9-ae50-3920e8a1a6f5","Type":"ContainerDied","Data":"1acd4d7b6c78907df440efc8378afdb1b2c00b1c6e52927ab7fab2c32db421e5"} Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.922247 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1acd4d7b6c78907df440efc8378afdb1b2c00b1c6e52927ab7fab2c32db421e5" Oct 02 21:54:34 crc kubenswrapper[4636]: I1002 21:54:34.922297 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.015141 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq"] Oct 02 21:54:35 crc kubenswrapper[4636]: E1002 21:54:35.015582 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7345fa48-35b0-41e9-ae50-3920e8a1a6f5" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.015604 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="7345fa48-35b0-41e9-ae50-3920e8a1a6f5" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.015836 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="7345fa48-35b0-41e9-ae50-3920e8a1a6f5" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.016526 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.018334 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.020106 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.020439 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.020888 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.038689 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq"] Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.123887 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk2q4\" (UniqueName: \"kubernetes.io/projected/581c99cd-eb0c-4117-8475-44bc85027a9a-kube-api-access-mk2q4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.124128 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.124288 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.226148 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk2q4\" (UniqueName: \"kubernetes.io/projected/581c99cd-eb0c-4117-8475-44bc85027a9a-kube-api-access-mk2q4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.226582 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.226769 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.230290 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.231965 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.242379 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk2q4\" (UniqueName: \"kubernetes.io/projected/581c99cd-eb0c-4117-8475-44bc85027a9a-kube-api-access-mk2q4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.333610 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.848454 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq"] Oct 02 21:54:35 crc kubenswrapper[4636]: I1002 21:54:35.931011 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" event={"ID":"581c99cd-eb0c-4117-8475-44bc85027a9a","Type":"ContainerStarted","Data":"b7b9205caf8400adfbf7d060b1b185f3717b00073b3ce3cb8d6af8edbe47d134"} Oct 02 21:54:36 crc kubenswrapper[4636]: I1002 21:54:36.944634 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" event={"ID":"581c99cd-eb0c-4117-8475-44bc85027a9a","Type":"ContainerStarted","Data":"40da4a7faba75aee72bdf4d07e33b5424bd84876c8315bad81b0de8639378547"} Oct 02 21:54:36 crc kubenswrapper[4636]: I1002 21:54:36.977528 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" podStartSLOduration=2.795336908 podStartE2EDuration="2.97750942s" podCreationTimestamp="2025-10-02 21:54:34 +0000 UTC" firstStartedPulling="2025-10-02 21:54:35.860555753 +0000 UTC m=+1867.183563772" lastFinishedPulling="2025-10-02 21:54:36.042728265 +0000 UTC m=+1867.365736284" observedRunningTime="2025-10-02 21:54:36.965352956 +0000 UTC m=+1868.288361005" watchObservedRunningTime="2025-10-02 21:54:36.97750942 +0000 UTC m=+1868.300517439" Oct 02 21:54:41 crc kubenswrapper[4636]: I1002 21:54:41.988730 4636 generic.go:334] "Generic (PLEG): container finished" podID="581c99cd-eb0c-4117-8475-44bc85027a9a" containerID="40da4a7faba75aee72bdf4d07e33b5424bd84876c8315bad81b0de8639378547" exitCode=0 Oct 02 21:54:41 crc kubenswrapper[4636]: I1002 
21:54:41.988827 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" event={"ID":"581c99cd-eb0c-4117-8475-44bc85027a9a","Type":"ContainerDied","Data":"40da4a7faba75aee72bdf4d07e33b5424bd84876c8315bad81b0de8639378547"} Oct 02 21:54:42 crc kubenswrapper[4636]: I1002 21:54:42.040635 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-45bc-account-create-q5dkm"] Oct 02 21:54:42 crc kubenswrapper[4636]: I1002 21:54:42.052580 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-1ba0-account-create-c6g94"] Oct 02 21:54:42 crc kubenswrapper[4636]: I1002 21:54:42.061471 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-2c0f-account-create-k2pkd"] Oct 02 21:54:42 crc kubenswrapper[4636]: I1002 21:54:42.068426 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-45bc-account-create-q5dkm"] Oct 02 21:54:42 crc kubenswrapper[4636]: I1002 21:54:42.077265 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-2c0f-account-create-k2pkd"] Oct 02 21:54:42 crc kubenswrapper[4636]: I1002 21:54:42.083681 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-1ba0-account-create-c6g94"] Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.374465 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.523404 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk2q4\" (UniqueName: \"kubernetes.io/projected/581c99cd-eb0c-4117-8475-44bc85027a9a-kube-api-access-mk2q4\") pod \"581c99cd-eb0c-4117-8475-44bc85027a9a\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.523632 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-ssh-key\") pod \"581c99cd-eb0c-4117-8475-44bc85027a9a\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.524616 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-inventory\") pod \"581c99cd-eb0c-4117-8475-44bc85027a9a\" (UID: \"581c99cd-eb0c-4117-8475-44bc85027a9a\") " Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.529432 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/581c99cd-eb0c-4117-8475-44bc85027a9a-kube-api-access-mk2q4" (OuterVolumeSpecName: "kube-api-access-mk2q4") pod "581c99cd-eb0c-4117-8475-44bc85027a9a" (UID: "581c99cd-eb0c-4117-8475-44bc85027a9a"). InnerVolumeSpecName "kube-api-access-mk2q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.553840 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "581c99cd-eb0c-4117-8475-44bc85027a9a" (UID: "581c99cd-eb0c-4117-8475-44bc85027a9a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.580987 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-inventory" (OuterVolumeSpecName: "inventory") pod "581c99cd-eb0c-4117-8475-44bc85027a9a" (UID: "581c99cd-eb0c-4117-8475-44bc85027a9a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.624167 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2df6b1a1-db69-4eaa-a32e-2ba717d9eba7" path="/var/lib/kubelet/pods/2df6b1a1-db69-4eaa-a32e-2ba717d9eba7/volumes" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.625434 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a99242c-2b7f-4ef3-85bf-e4f0a870fd25" path="/var/lib/kubelet/pods/7a99242c-2b7f-4ef3-85bf-e4f0a870fd25/volumes" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.626373 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5b83cd7-4b9a-40b3-9e66-af7c691c2a21" path="/var/lib/kubelet/pods/d5b83cd7-4b9a-40b3-9e66-af7c691c2a21/volumes" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.628237 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk2q4\" (UniqueName: \"kubernetes.io/projected/581c99cd-eb0c-4117-8475-44bc85027a9a-kube-api-access-mk2q4\") on node \"crc\" DevicePath \"\"" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.628281 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:54:43 crc kubenswrapper[4636]: I1002 21:54:43.628299 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581c99cd-eb0c-4117-8475-44bc85027a9a-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.010357 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" event={"ID":"581c99cd-eb0c-4117-8475-44bc85027a9a","Type":"ContainerDied","Data":"b7b9205caf8400adfbf7d060b1b185f3717b00073b3ce3cb8d6af8edbe47d134"} Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.010396 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7b9205caf8400adfbf7d060b1b185f3717b00073b3ce3cb8d6af8edbe47d134" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.010442 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.111256 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b"] Oct 02 21:54:44 crc kubenswrapper[4636]: E1002 21:54:44.111906 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="581c99cd-eb0c-4117-8475-44bc85027a9a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.111975 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="581c99cd-eb0c-4117-8475-44bc85027a9a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.112208 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="581c99cd-eb0c-4117-8475-44bc85027a9a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.112922 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.116924 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.117153 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.117715 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.125490 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b"] Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.161062 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.240095 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzth8\" (UniqueName: \"kubernetes.io/projected/0db4951d-65e4-4a9e-9761-433d6cfb17c9-kube-api-access-wzth8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.240179 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.240208 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.342402 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.342434 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.342571 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzth8\" (UniqueName: \"kubernetes.io/projected/0db4951d-65e4-4a9e-9761-433d6cfb17c9-kube-api-access-wzth8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.347270 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.350466 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.361249 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzth8\" (UniqueName: \"kubernetes.io/projected/0db4951d-65e4-4a9e-9761-433d6cfb17c9-kube-api-access-wzth8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mcd5b\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.478286 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:54:44 crc kubenswrapper[4636]: I1002 21:54:44.996762 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b"] Oct 02 21:54:45 crc kubenswrapper[4636]: I1002 21:54:45.019181 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" event={"ID":"0db4951d-65e4-4a9e-9761-433d6cfb17c9","Type":"ContainerStarted","Data":"0827efc264650a34f1865f6f1b19009e4e36423b2c84eef47dba9bccf01d39cf"} Oct 02 21:54:46 crc kubenswrapper[4636]: I1002 21:54:46.029226 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" event={"ID":"0db4951d-65e4-4a9e-9761-433d6cfb17c9","Type":"ContainerStarted","Data":"3a3b55cfb985cbdc611b23f9b73ffee81b43d9ad04a4a9fd9496a46cb66d6bdf"} Oct 02 21:54:46 crc kubenswrapper[4636]: I1002 21:54:46.053703 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" podStartSLOduration=1.872862194 podStartE2EDuration="2.053669115s" podCreationTimestamp="2025-10-02 21:54:44 +0000 UTC" firstStartedPulling="2025-10-02 21:54:45.004220355 +0000 UTC m=+1876.327228374" lastFinishedPulling="2025-10-02 21:54:45.185027276 +0000 UTC m=+1876.508035295" observedRunningTime="2025-10-02 21:54:46.046126465 +0000 UTC m=+1877.369134524" watchObservedRunningTime="2025-10-02 21:54:46.053669115 +0000 UTC m=+1877.376677134" Oct 02 21:55:05 crc kubenswrapper[4636]: I1002 21:55:05.139640 4636 scope.go:117] "RemoveContainer" containerID="b07381b2d10c632073e6d760a8bbc5af2236f21bfe24d32ee6f38a6a7e162cf5" Oct 02 21:55:05 crc kubenswrapper[4636]: I1002 21:55:05.179005 4636 scope.go:117] "RemoveContainer" containerID="04d8510dedeed1780ec92f6df059109baad7c3ac86af080906dbc28b100b262b" Oct 02 21:55:05 crc kubenswrapper[4636]: I1002 21:55:05.234801 4636 scope.go:117] "RemoveContainer" containerID="92aac92d398855a09f27798c9cd67a0d12a0b8806635b3c842184ee1eee83016" Oct 02 21:55:05 crc kubenswrapper[4636]: I1002 21:55:05.291370 4636 scope.go:117] "RemoveContainer" containerID="47b8fcd0901d0adc7fae19fd4cf2a595883d1907113df97f0f0da969324143ce" Oct 02 21:55:05 crc kubenswrapper[4636]: I1002 21:55:05.323462 4636 scope.go:117] "RemoveContainer" containerID="9751da6246c8ee3ec6139a9c94ad9e577e88874131e36a9beee5a43fdc433ab8" Oct 02 21:55:05 crc kubenswrapper[4636]: I1002 21:55:05.367309 4636 scope.go:117] "RemoveContainer" containerID="3ca210cd4e4ad1fc39c46e49750894bfb0e5bd4218ec01737b4ac838371f5221" Oct 02 21:55:06 crc kubenswrapper[4636]: I1002 21:55:06.061291 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xdk7c"] Oct 02 21:55:06 crc kubenswrapper[4636]: I1002 21:55:06.072031 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xdk7c"] Oct 02 21:55:07 crc kubenswrapper[4636]: I1002 21:55:07.613744 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a97ee2c8-04de-4293-b7f3-eb9ed870c4b5" path="/var/lib/kubelet/pods/a97ee2c8-04de-4293-b7f3-eb9ed870c4b5/volumes" Oct 02 21:55:29 crc kubenswrapper[4636]: I1002 21:55:29.453963 4636 generic.go:334] "Generic (PLEG): container finished" podID="0db4951d-65e4-4a9e-9761-433d6cfb17c9" containerID="3a3b55cfb985cbdc611b23f9b73ffee81b43d9ad04a4a9fd9496a46cb66d6bdf" exitCode=0 Oct 02 21:55:29 crc 
kubenswrapper[4636]: I1002 21:55:29.454068 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" event={"ID":"0db4951d-65e4-4a9e-9761-433d6cfb17c9","Type":"ContainerDied","Data":"3a3b55cfb985cbdc611b23f9b73ffee81b43d9ad04a4a9fd9496a46cb66d6bdf"} Oct 02 21:55:30 crc kubenswrapper[4636]: I1002 21:55:30.936981 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.051972 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-ssh-key\") pod \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.052179 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-inventory\") pod \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.052205 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzth8\" (UniqueName: \"kubernetes.io/projected/0db4951d-65e4-4a9e-9761-433d6cfb17c9-kube-api-access-wzth8\") pod \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\" (UID: \"0db4951d-65e4-4a9e-9761-433d6cfb17c9\") " Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.063639 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0db4951d-65e4-4a9e-9761-433d6cfb17c9-kube-api-access-wzth8" (OuterVolumeSpecName: "kube-api-access-wzth8") pod "0db4951d-65e4-4a9e-9761-433d6cfb17c9" (UID: "0db4951d-65e4-4a9e-9761-433d6cfb17c9"). InnerVolumeSpecName "kube-api-access-wzth8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.080323 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-inventory" (OuterVolumeSpecName: "inventory") pod "0db4951d-65e4-4a9e-9761-433d6cfb17c9" (UID: "0db4951d-65e4-4a9e-9761-433d6cfb17c9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.080704 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0db4951d-65e4-4a9e-9761-433d6cfb17c9" (UID: "0db4951d-65e4-4a9e-9761-433d6cfb17c9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.154198 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.154232 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0db4951d-65e4-4a9e-9761-433d6cfb17c9-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.154247 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzth8\" (UniqueName: \"kubernetes.io/projected/0db4951d-65e4-4a9e-9761-433d6cfb17c9-kube-api-access-wzth8\") on node \"crc\" DevicePath \"\"" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.473089 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" event={"ID":"0db4951d-65e4-4a9e-9761-433d6cfb17c9","Type":"ContainerDied","Data":"0827efc264650a34f1865f6f1b19009e4e36423b2c84eef47dba9bccf01d39cf"} Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.473150 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0827efc264650a34f1865f6f1b19009e4e36423b2c84eef47dba9bccf01d39cf" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.473186 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mcd5b" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.599491 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg"] Oct 02 21:55:31 crc kubenswrapper[4636]: E1002 21:55:31.599859 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0db4951d-65e4-4a9e-9761-433d6cfb17c9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.599883 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="0db4951d-65e4-4a9e-9761-433d6cfb17c9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.600080 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="0db4951d-65e4-4a9e-9761-433d6cfb17c9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.600676 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.602912 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.604521 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.604707 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.608213 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.614332 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg"] Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.665583 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.665720 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4wxb\" (UniqueName: \"kubernetes.io/projected/3306d272-f7a6-40b6-87c5-13ea12f59e31-kube-api-access-p4wxb\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.665862 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.767304 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.767378 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4wxb\" (UniqueName: \"kubernetes.io/projected/3306d272-f7a6-40b6-87c5-13ea12f59e31-kube-api-access-p4wxb\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.767444 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" 
(UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.771553 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.783570 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.789874 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4wxb\" (UniqueName: \"kubernetes.io/projected/3306d272-f7a6-40b6-87c5-13ea12f59e31-kube-api-access-p4wxb\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-27qbg\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:31 crc kubenswrapper[4636]: I1002 21:55:31.959866 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:55:32 crc kubenswrapper[4636]: I1002 21:55:32.471764 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg"] Oct 02 21:55:32 crc kubenswrapper[4636]: I1002 21:55:32.481178 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" event={"ID":"3306d272-f7a6-40b6-87c5-13ea12f59e31","Type":"ContainerStarted","Data":"c086a9635e796ccabf311ccf7b419abdbfcdd0f3178fd6732affbda96571c699"} Oct 02 21:55:33 crc kubenswrapper[4636]: I1002 21:55:33.492552 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" event={"ID":"3306d272-f7a6-40b6-87c5-13ea12f59e31","Type":"ContainerStarted","Data":"768425a60c0761013b5caae96033bbd5ffa74d88ae43b193ce3f6bc67714f7bd"} Oct 02 21:55:33 crc kubenswrapper[4636]: I1002 21:55:33.528915 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" podStartSLOduration=2.382055502 podStartE2EDuration="2.528891112s" podCreationTimestamp="2025-10-02 21:55:31 +0000 UTC" firstStartedPulling="2025-10-02 21:55:32.470719029 +0000 UTC m=+1923.793727038" lastFinishedPulling="2025-10-02 21:55:32.617554639 +0000 UTC m=+1923.940562648" observedRunningTime="2025-10-02 21:55:33.520576701 +0000 UTC m=+1924.843584760" watchObservedRunningTime="2025-10-02 21:55:33.528891112 +0000 UTC m=+1924.851899131" Oct 02 21:55:34 crc kubenswrapper[4636]: I1002 21:55:34.047103 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rgdjq"] Oct 02 21:55:34 crc kubenswrapper[4636]: I1002 21:55:34.064915 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-bf96z"] Oct 02 21:55:34 crc kubenswrapper[4636]: I1002 21:55:34.073259 4636 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rgdjq"] Oct 02 21:55:34 crc kubenswrapper[4636]: I1002 21:55:34.084435 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-bf96z"] Oct 02 21:55:35 crc kubenswrapper[4636]: I1002 21:55:35.615588 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f81ffd6-ca6c-4848-bb80-576172c2f647" path="/var/lib/kubelet/pods/3f81ffd6-ca6c-4848-bb80-576172c2f647/volumes" Oct 02 21:55:35 crc kubenswrapper[4636]: I1002 21:55:35.616642 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ea3d75f-eebd-4bb1-ae91-c460d4bac33f" path="/var/lib/kubelet/pods/9ea3d75f-eebd-4bb1-ae91-c460d4bac33f/volumes" Oct 02 21:56:05 crc kubenswrapper[4636]: I1002 21:56:05.521038 4636 scope.go:117] "RemoveContainer" containerID="9a4285b349cc1212dce26f7a49020db89ac85f45c91dd6c98e1dc70c316e2bba" Oct 02 21:56:05 crc kubenswrapper[4636]: I1002 21:56:05.574650 4636 scope.go:117] "RemoveContainer" containerID="aa4bceb21d8082f758a061347f1b63bf2aae4180a56de7fb8940ee1fd9aa2183" Oct 02 21:56:05 crc kubenswrapper[4636]: I1002 21:56:05.619482 4636 scope.go:117] "RemoveContainer" containerID="2907d819a6103e436640d6c422e993c2481d38378bf8e0961c8980e3edc96ddb" Oct 02 21:56:16 crc kubenswrapper[4636]: I1002 21:56:16.049070 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-b82lk"] Oct 02 21:56:16 crc kubenswrapper[4636]: I1002 21:56:16.057019 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-b82lk"] Oct 02 21:56:17 crc kubenswrapper[4636]: I1002 21:56:17.616160 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d28e7d5-138e-4b42-bc0a-d118cca25785" path="/var/lib/kubelet/pods/4d28e7d5-138e-4b42-bc0a-d118cca25785/volumes" Oct 02 21:56:23 crc kubenswrapper[4636]: I1002 21:56:23.117030 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:56:23 crc kubenswrapper[4636]: I1002 21:56:23.117864 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:56:34 crc kubenswrapper[4636]: I1002 21:56:34.101810 4636 generic.go:334] "Generic (PLEG): container finished" podID="3306d272-f7a6-40b6-87c5-13ea12f59e31" containerID="768425a60c0761013b5caae96033bbd5ffa74d88ae43b193ce3f6bc67714f7bd" exitCode=2 Oct 02 21:56:34 crc kubenswrapper[4636]: I1002 21:56:34.101906 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" event={"ID":"3306d272-f7a6-40b6-87c5-13ea12f59e31","Type":"ContainerDied","Data":"768425a60c0761013b5caae96033bbd5ffa74d88ae43b193ce3f6bc67714f7bd"} Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.558476 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.604645 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-inventory\") pod \"3306d272-f7a6-40b6-87c5-13ea12f59e31\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.605046 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-ssh-key\") pod \"3306d272-f7a6-40b6-87c5-13ea12f59e31\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.605100 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4wxb\" (UniqueName: \"kubernetes.io/projected/3306d272-f7a6-40b6-87c5-13ea12f59e31-kube-api-access-p4wxb\") pod \"3306d272-f7a6-40b6-87c5-13ea12f59e31\" (UID: \"3306d272-f7a6-40b6-87c5-13ea12f59e31\") " Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.614991 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3306d272-f7a6-40b6-87c5-13ea12f59e31-kube-api-access-p4wxb" (OuterVolumeSpecName: "kube-api-access-p4wxb") pod "3306d272-f7a6-40b6-87c5-13ea12f59e31" (UID: "3306d272-f7a6-40b6-87c5-13ea12f59e31"). InnerVolumeSpecName "kube-api-access-p4wxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.654823 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-inventory" (OuterVolumeSpecName: "inventory") pod "3306d272-f7a6-40b6-87c5-13ea12f59e31" (UID: "3306d272-f7a6-40b6-87c5-13ea12f59e31"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.656992 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3306d272-f7a6-40b6-87c5-13ea12f59e31" (UID: "3306d272-f7a6-40b6-87c5-13ea12f59e31"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.709320 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.709661 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4wxb\" (UniqueName: \"kubernetes.io/projected/3306d272-f7a6-40b6-87c5-13ea12f59e31-kube-api-access-p4wxb\") on node \"crc\" DevicePath \"\"" Oct 02 21:56:35 crc kubenswrapper[4636]: I1002 21:56:35.709846 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3306d272-f7a6-40b6-87c5-13ea12f59e31-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:56:36 crc kubenswrapper[4636]: I1002 21:56:36.126408 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" event={"ID":"3306d272-f7a6-40b6-87c5-13ea12f59e31","Type":"ContainerDied","Data":"c086a9635e796ccabf311ccf7b419abdbfcdd0f3178fd6732affbda96571c699"} Oct 02 21:56:36 crc kubenswrapper[4636]: I1002 21:56:36.126449 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c086a9635e796ccabf311ccf7b419abdbfcdd0f3178fd6732affbda96571c699" Oct 02 21:56:36 crc kubenswrapper[4636]: I1002 21:56:36.126489 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-27qbg" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.030596 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"] Oct 02 21:56:43 crc kubenswrapper[4636]: E1002 21:56:43.031741 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3306d272-f7a6-40b6-87c5-13ea12f59e31" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.031791 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3306d272-f7a6-40b6-87c5-13ea12f59e31" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.032178 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3306d272-f7a6-40b6-87c5-13ea12f59e31" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.033220 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.033220 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.037973 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.038280 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.038802 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.041140 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.042560 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"]
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.161772 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.161843 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8mmp\" (UniqueName: \"kubernetes.io/projected/593f05c7-4207-4893-9fe1-7487a5a7718c-kube-api-access-n8mmp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.161895 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.263205 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.263267 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8mmp\" (UniqueName: \"kubernetes.io/projected/593f05c7-4207-4893-9fe1-7487a5a7718c-kube-api-access-n8mmp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.263312 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"
(UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.269825 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.276363 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.288443 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8mmp\" (UniqueName: \"kubernetes.io/projected/593f05c7-4207-4893-9fe1-7487a5a7718c-kube-api-access-n8mmp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-d7466\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.355043 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.926981 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 21:56:43 crc kubenswrapper[4636]: I1002 21:56:43.930270 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466"] Oct 02 21:56:44 crc kubenswrapper[4636]: I1002 21:56:44.205492 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" event={"ID":"593f05c7-4207-4893-9fe1-7487a5a7718c","Type":"ContainerStarted","Data":"729d21a1ee0baab8223f2ed852bb78c05d0e210fe4503d3be7975419c9769dc6"} Oct 02 21:56:45 crc kubenswrapper[4636]: I1002 21:56:45.216971 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" event={"ID":"593f05c7-4207-4893-9fe1-7487a5a7718c","Type":"ContainerStarted","Data":"e37e6dce50d40f929f7c757f92889239007ae6482e8a247d5e0d40ca8c69de57"} Oct 02 21:56:45 crc kubenswrapper[4636]: I1002 21:56:45.241124 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" podStartSLOduration=2.081657 podStartE2EDuration="2.241102834s" podCreationTimestamp="2025-10-02 21:56:43 +0000 UTC" firstStartedPulling="2025-10-02 21:56:43.926772299 +0000 UTC m=+1995.249780318" lastFinishedPulling="2025-10-02 21:56:44.086218133 +0000 UTC m=+1995.409226152" observedRunningTime="2025-10-02 21:56:45.238454354 +0000 UTC m=+1996.561462373" watchObservedRunningTime="2025-10-02 21:56:45.241102834 +0000 UTC m=+1996.564110863" Oct 02 21:56:53 crc kubenswrapper[4636]: I1002 21:56:53.118056 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:56:53 crc kubenswrapper[4636]: I1002 21:56:53.118786 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:57:05 crc kubenswrapper[4636]: I1002 21:57:05.733453 4636 scope.go:117] "RemoveContainer" containerID="4ac9f7567672b639710bb86a4936c3ee21159d9efc95cbc3c25626988f3ebae2" Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.117290 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.118050 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.118159 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.119288 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c6da1ee0722f89363964853239734f7c0beeeafaa1f6842cc16f9a803dfcc9e2"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.119577 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://c6da1ee0722f89363964853239734f7c0beeeafaa1f6842cc16f9a803dfcc9e2" gracePeriod=600 Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.575276 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="c6da1ee0722f89363964853239734f7c0beeeafaa1f6842cc16f9a803dfcc9e2" exitCode=0 Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.575490 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"c6da1ee0722f89363964853239734f7c0beeeafaa1f6842cc16f9a803dfcc9e2"} Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.575658 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35"} Oct 02 21:57:23 crc kubenswrapper[4636]: I1002 21:57:23.575694 4636 scope.go:117] "RemoveContainer" 
containerID="8a9e20b9ee04588b08bfc87d408d354c3a8240b74a61900b89e017b533998416" Oct 02 21:57:39 crc kubenswrapper[4636]: I1002 21:57:39.734333 4636 generic.go:334] "Generic (PLEG): container finished" podID="593f05c7-4207-4893-9fe1-7487a5a7718c" containerID="e37e6dce50d40f929f7c757f92889239007ae6482e8a247d5e0d40ca8c69de57" exitCode=0 Oct 02 21:57:39 crc kubenswrapper[4636]: I1002 21:57:39.734416 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" event={"ID":"593f05c7-4207-4893-9fe1-7487a5a7718c","Type":"ContainerDied","Data":"e37e6dce50d40f929f7c757f92889239007ae6482e8a247d5e0d40ca8c69de57"} Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.158789 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.208545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8mmp\" (UniqueName: \"kubernetes.io/projected/593f05c7-4207-4893-9fe1-7487a5a7718c-kube-api-access-n8mmp\") pod \"593f05c7-4207-4893-9fe1-7487a5a7718c\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.208758 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-ssh-key\") pod \"593f05c7-4207-4893-9fe1-7487a5a7718c\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.208909 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-inventory\") pod \"593f05c7-4207-4893-9fe1-7487a5a7718c\" (UID: \"593f05c7-4207-4893-9fe1-7487a5a7718c\") " Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.215336 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/593f05c7-4207-4893-9fe1-7487a5a7718c-kube-api-access-n8mmp" (OuterVolumeSpecName: "kube-api-access-n8mmp") pod "593f05c7-4207-4893-9fe1-7487a5a7718c" (UID: "593f05c7-4207-4893-9fe1-7487a5a7718c"). InnerVolumeSpecName "kube-api-access-n8mmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.239209 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-inventory" (OuterVolumeSpecName: "inventory") pod "593f05c7-4207-4893-9fe1-7487a5a7718c" (UID: "593f05c7-4207-4893-9fe1-7487a5a7718c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.244920 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "593f05c7-4207-4893-9fe1-7487a5a7718c" (UID: "593f05c7-4207-4893-9fe1-7487a5a7718c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.311421 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.311456 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8mmp\" (UniqueName: \"kubernetes.io/projected/593f05c7-4207-4893-9fe1-7487a5a7718c-kube-api-access-n8mmp\") on node \"crc\" DevicePath \"\"" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.311467 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/593f05c7-4207-4893-9fe1-7487a5a7718c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.756366 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" event={"ID":"593f05c7-4207-4893-9fe1-7487a5a7718c","Type":"ContainerDied","Data":"729d21a1ee0baab8223f2ed852bb78c05d0e210fe4503d3be7975419c9769dc6"} Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.756407 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="729d21a1ee0baab8223f2ed852bb78c05d0e210fe4503d3be7975419c9769dc6" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.756462 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-d7466" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.835341 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-27dxg"] Oct 02 21:57:41 crc kubenswrapper[4636]: E1002 21:57:41.835899 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="593f05c7-4207-4893-9fe1-7487a5a7718c" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.835918 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="593f05c7-4207-4893-9fe1-7487a5a7718c" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.836287 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="593f05c7-4207-4893-9fe1-7487a5a7718c" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.837246 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.839344 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.840687 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.841643 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.845064 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.852014 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-27dxg"]
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.924564 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.924853 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsv9c\" (UniqueName: \"kubernetes.io/projected/5a7476b3-58fe-4cf8-9d90-605f10c40e05-kube-api-access-xsv9c\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:41 crc kubenswrapper[4636]: I1002 21:57:41.925077 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.027124 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.027230 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsv9c\" (UniqueName: \"kubernetes.io/projected/5a7476b3-58fe-4cf8-9d90-605f10c40e05-kube-api-access-xsv9c\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.027271 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.031363 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.033004 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.047735 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsv9c\" (UniqueName: \"kubernetes.io/projected/5a7476b3-58fe-4cf8-9d90-605f10c40e05-kube-api-access-xsv9c\") pod \"ssh-known-hosts-edpm-deployment-27dxg\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") " pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.154503 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.687435 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-27dxg"]
Oct 02 21:57:42 crc kubenswrapper[4636]: I1002 21:57:42.767154 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg" event={"ID":"5a7476b3-58fe-4cf8-9d90-605f10c40e05","Type":"ContainerStarted","Data":"0a1782001b0bb527c5417bcc1d1eb989cdab3ba7628121c998e29f910510e1ea"}
Oct 02 21:57:43 crc kubenswrapper[4636]: I1002 21:57:43.777935 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg" event={"ID":"5a7476b3-58fe-4cf8-9d90-605f10c40e05","Type":"ContainerStarted","Data":"8ca5b89335c26abc530369ed8a70beddd805601ed64487d7d363f420217a53ca"}
Oct 02 21:57:51 crc kubenswrapper[4636]: I1002 21:57:51.851684 4636 generic.go:334] "Generic (PLEG): container finished" podID="5a7476b3-58fe-4cf8-9d90-605f10c40e05" containerID="8ca5b89335c26abc530369ed8a70beddd805601ed64487d7d363f420217a53ca" exitCode=0
Oct 02 21:57:51 crc kubenswrapper[4636]: I1002 21:57:51.851729 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg" event={"ID":"5a7476b3-58fe-4cf8-9d90-605f10c40e05","Type":"ContainerDied","Data":"8ca5b89335c26abc530369ed8a70beddd805601ed64487d7d363f420217a53ca"}
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.255874 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg"
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.374837 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-inventory-0\") pod \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") "
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.375002 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsv9c\" (UniqueName: \"kubernetes.io/projected/5a7476b3-58fe-4cf8-9d90-605f10c40e05-kube-api-access-xsv9c\") pod \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") "
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.375028 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-ssh-key-openstack-edpm-ipam\") pod \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\" (UID: \"5a7476b3-58fe-4cf8-9d90-605f10c40e05\") "
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.381029 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a7476b3-58fe-4cf8-9d90-605f10c40e05-kube-api-access-xsv9c" (OuterVolumeSpecName: "kube-api-access-xsv9c") pod "5a7476b3-58fe-4cf8-9d90-605f10c40e05" (UID: "5a7476b3-58fe-4cf8-9d90-605f10c40e05"). InnerVolumeSpecName "kube-api-access-xsv9c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.403827 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "5a7476b3-58fe-4cf8-9d90-605f10c40e05" (UID: "5a7476b3-58fe-4cf8-9d90-605f10c40e05"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.404456 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5a7476b3-58fe-4cf8-9d90-605f10c40e05" (UID: "5a7476b3-58fe-4cf8-9d90-605f10c40e05"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.478124 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsv9c\" (UniqueName: \"kubernetes.io/projected/5a7476b3-58fe-4cf8-9d90-605f10c40e05-kube-api-access-xsv9c\") on node \"crc\" DevicePath \"\"" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.478163 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.478180 4636 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5a7476b3-58fe-4cf8-9d90-605f10c40e05-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.892964 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg" event={"ID":"5a7476b3-58fe-4cf8-9d90-605f10c40e05","Type":"ContainerDied","Data":"0a1782001b0bb527c5417bcc1d1eb989cdab3ba7628121c998e29f910510e1ea"} Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.893013 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a1782001b0bb527c5417bcc1d1eb989cdab3ba7628121c998e29f910510e1ea" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.893103 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-27dxg" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.974236 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"] Oct 02 21:57:53 crc kubenswrapper[4636]: E1002 21:57:53.974630 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a7476b3-58fe-4cf8-9d90-605f10c40e05" containerName="ssh-known-hosts-edpm-deployment" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.974647 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a7476b3-58fe-4cf8-9d90-605f10c40e05" containerName="ssh-known-hosts-edpm-deployment" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.974867 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a7476b3-58fe-4cf8-9d90-605f10c40e05" containerName="ssh-known-hosts-edpm-deployment" Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.975458 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.979931 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.979971 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.980234 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.990915 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:57:53 crc kubenswrapper[4636]: I1002 21:57:53.994886 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"]
Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.091268 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"
Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.091578 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz25m\" (UniqueName: \"kubernetes.io/projected/9484ec45-abcc-487a-9790-92f39c74c829-kube-api-access-sz25m\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"
Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.091642 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"
Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.193313 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz25m\" (UniqueName: \"kubernetes.io/projected/9484ec45-abcc-487a-9790-92f39c74c829-kube-api-access-sz25m\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"
Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.193406 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"
Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.193470 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.196829 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.197589 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.223507 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz25m\" (UniqueName: \"kubernetes.io/projected/9484ec45-abcc-487a-9790-92f39c74c829-kube-api-access-sz25m\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4hs6m\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.299460 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.807853 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m"] Oct 02 21:57:54 crc kubenswrapper[4636]: I1002 21:57:54.902435 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" event={"ID":"9484ec45-abcc-487a-9790-92f39c74c829","Type":"ContainerStarted","Data":"ba55f1ab7646e4c18165667b3abbca77b36757eda0a31aee0c2f2303e0adeebc"} Oct 02 21:57:55 crc kubenswrapper[4636]: I1002 21:57:55.913198 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" event={"ID":"9484ec45-abcc-487a-9790-92f39c74c829","Type":"ContainerStarted","Data":"316a7263280a1ca4151b794edada33dad19143051ea9f2d40daf7c34639d3dcf"} Oct 02 21:57:55 crc kubenswrapper[4636]: I1002 21:57:55.932300 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" podStartSLOduration=2.7393226779999997 podStartE2EDuration="2.932283272s" podCreationTimestamp="2025-10-02 21:57:53 +0000 UTC" firstStartedPulling="2025-10-02 21:57:54.822350256 +0000 UTC m=+2066.145358275" lastFinishedPulling="2025-10-02 21:57:55.01531085 +0000 UTC m=+2066.338318869" observedRunningTime="2025-10-02 21:57:55.928300976 +0000 UTC m=+2067.251308995" watchObservedRunningTime="2025-10-02 21:57:55.932283272 +0000 UTC m=+2067.255291291" Oct 02 21:58:06 crc kubenswrapper[4636]: I1002 21:58:06.025452 4636 generic.go:334] "Generic (PLEG): container finished" podID="9484ec45-abcc-487a-9790-92f39c74c829" containerID="316a7263280a1ca4151b794edada33dad19143051ea9f2d40daf7c34639d3dcf" exitCode=0 Oct 02 21:58:06 crc kubenswrapper[4636]: I1002 21:58:06.025520 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" 
event={"ID":"9484ec45-abcc-487a-9790-92f39c74c829","Type":"ContainerDied","Data":"316a7263280a1ca4151b794edada33dad19143051ea9f2d40daf7c34639d3dcf"} Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.429346 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.542520 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-ssh-key\") pod \"9484ec45-abcc-487a-9790-92f39c74c829\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.542675 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz25m\" (UniqueName: \"kubernetes.io/projected/9484ec45-abcc-487a-9790-92f39c74c829-kube-api-access-sz25m\") pod \"9484ec45-abcc-487a-9790-92f39c74c829\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.542712 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-inventory\") pod \"9484ec45-abcc-487a-9790-92f39c74c829\" (UID: \"9484ec45-abcc-487a-9790-92f39c74c829\") " Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.552518 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9484ec45-abcc-487a-9790-92f39c74c829-kube-api-access-sz25m" (OuterVolumeSpecName: "kube-api-access-sz25m") pod "9484ec45-abcc-487a-9790-92f39c74c829" (UID: "9484ec45-abcc-487a-9790-92f39c74c829"). InnerVolumeSpecName "kube-api-access-sz25m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.574920 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9484ec45-abcc-487a-9790-92f39c74c829" (UID: "9484ec45-abcc-487a-9790-92f39c74c829"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.578804 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-inventory" (OuterVolumeSpecName: "inventory") pod "9484ec45-abcc-487a-9790-92f39c74c829" (UID: "9484ec45-abcc-487a-9790-92f39c74c829"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.645110 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.645136 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz25m\" (UniqueName: \"kubernetes.io/projected/9484ec45-abcc-487a-9790-92f39c74c829-kube-api-access-sz25m\") on node \"crc\" DevicePath \"\"" Oct 02 21:58:07 crc kubenswrapper[4636]: I1002 21:58:07.645146 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9484ec45-abcc-487a-9790-92f39c74c829-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.048430 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" event={"ID":"9484ec45-abcc-487a-9790-92f39c74c829","Type":"ContainerDied","Data":"ba55f1ab7646e4c18165667b3abbca77b36757eda0a31aee0c2f2303e0adeebc"} Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.048911 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba55f1ab7646e4c18165667b3abbca77b36757eda0a31aee0c2f2303e0adeebc" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.048536 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4hs6m" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.141850 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"] Oct 02 21:58:08 crc kubenswrapper[4636]: E1002 21:58:08.142408 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9484ec45-abcc-487a-9790-92f39c74c829" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.142439 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="9484ec45-abcc-487a-9790-92f39c74c829" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.142876 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="9484ec45-abcc-487a-9790-92f39c74c829" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.143880 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.147559 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.147930 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.148115 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.148935 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.158420 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"]
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.258478 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.258545 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.258638 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngpxw\" (UniqueName: \"kubernetes.io/projected/fa386dd8-b89e-416f-a564-c4b3396fde09-kube-api-access-ngpxw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.360449 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.360514 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngpxw\" (UniqueName: \"kubernetes.io/projected/fa386dd8-b89e-416f-a564-c4b3396fde09-kube-api-access-ngpxw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"
Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.360636 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"
\"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.364853 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.369709 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.394940 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngpxw\" (UniqueName: \"kubernetes.io/projected/fa386dd8-b89e-416f-a564-c4b3396fde09-kube-api-access-ngpxw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-plk95\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.461267 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:08 crc kubenswrapper[4636]: I1002 21:58:08.971594 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95"] Oct 02 21:58:09 crc kubenswrapper[4636]: I1002 21:58:09.058624 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" event={"ID":"fa386dd8-b89e-416f-a564-c4b3396fde09","Type":"ContainerStarted","Data":"c9b817d66cf7a8be751d331390d5d74e55b56cd4cd1a1886178c33ee4d17ab37"} Oct 02 21:58:10 crc kubenswrapper[4636]: I1002 21:58:10.101411 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" event={"ID":"fa386dd8-b89e-416f-a564-c4b3396fde09","Type":"ContainerStarted","Data":"faaa641b53bea0e4ef810ea1606de415d1a805144c72ff6552d50441104be40e"} Oct 02 21:58:10 crc kubenswrapper[4636]: I1002 21:58:10.121873 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" podStartSLOduration=1.904014553 podStartE2EDuration="2.121853557s" podCreationTimestamp="2025-10-02 21:58:08 +0000 UTC" firstStartedPulling="2025-10-02 21:58:08.981656367 +0000 UTC m=+2080.304664396" lastFinishedPulling="2025-10-02 21:58:09.199495381 +0000 UTC m=+2080.522503400" observedRunningTime="2025-10-02 21:58:10.117496411 +0000 UTC m=+2081.440504470" watchObservedRunningTime="2025-10-02 21:58:10.121853557 +0000 UTC m=+2081.444861576" Oct 02 21:58:20 crc kubenswrapper[4636]: I1002 21:58:20.198726 4636 generic.go:334] "Generic (PLEG): container finished" podID="fa386dd8-b89e-416f-a564-c4b3396fde09" containerID="faaa641b53bea0e4ef810ea1606de415d1a805144c72ff6552d50441104be40e" exitCode=0 Oct 02 21:58:20 crc kubenswrapper[4636]: I1002 21:58:20.198858 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" 
event={"ID":"fa386dd8-b89e-416f-a564-c4b3396fde09","Type":"ContainerDied","Data":"faaa641b53bea0e4ef810ea1606de415d1a805144c72ff6552d50441104be40e"} Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.693270 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.819302 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngpxw\" (UniqueName: \"kubernetes.io/projected/fa386dd8-b89e-416f-a564-c4b3396fde09-kube-api-access-ngpxw\") pod \"fa386dd8-b89e-416f-a564-c4b3396fde09\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.819663 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-ssh-key\") pod \"fa386dd8-b89e-416f-a564-c4b3396fde09\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.819857 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-inventory\") pod \"fa386dd8-b89e-416f-a564-c4b3396fde09\" (UID: \"fa386dd8-b89e-416f-a564-c4b3396fde09\") " Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.827356 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa386dd8-b89e-416f-a564-c4b3396fde09-kube-api-access-ngpxw" (OuterVolumeSpecName: "kube-api-access-ngpxw") pod "fa386dd8-b89e-416f-a564-c4b3396fde09" (UID: "fa386dd8-b89e-416f-a564-c4b3396fde09"). InnerVolumeSpecName "kube-api-access-ngpxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.850261 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-inventory" (OuterVolumeSpecName: "inventory") pod "fa386dd8-b89e-416f-a564-c4b3396fde09" (UID: "fa386dd8-b89e-416f-a564-c4b3396fde09"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.852447 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa386dd8-b89e-416f-a564-c4b3396fde09" (UID: "fa386dd8-b89e-416f-a564-c4b3396fde09"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.923000 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.923049 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngpxw\" (UniqueName: \"kubernetes.io/projected/fa386dd8-b89e-416f-a564-c4b3396fde09-kube-api-access-ngpxw\") on node \"crc\" DevicePath \"\"" Oct 02 21:58:21 crc kubenswrapper[4636]: I1002 21:58:21.923069 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa386dd8-b89e-416f-a564-c4b3396fde09-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.231346 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" event={"ID":"fa386dd8-b89e-416f-a564-c4b3396fde09","Type":"ContainerDied","Data":"c9b817d66cf7a8be751d331390d5d74e55b56cd4cd1a1886178c33ee4d17ab37"} Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.231382 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-plk95" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.231413 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9b817d66cf7a8be751d331390d5d74e55b56cd4cd1a1886178c33ee4d17ab37" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.359248 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"] Oct 02 21:58:22 crc kubenswrapper[4636]: E1002 21:58:22.359712 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa386dd8-b89e-416f-a564-c4b3396fde09" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.359735 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa386dd8-b89e-416f-a564-c4b3396fde09" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.360003 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa386dd8-b89e-416f-a564-c4b3396fde09" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.360845 4636 util.go:30] "No sandbox for pod can be found. 
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.375814 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.376431 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.377016 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.377194 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.377379 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.377717 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.377955 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.378054 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.388128 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"]
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.431875 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.431953 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432009 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"
Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432037 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"
\"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432054 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432086 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432143 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v8tz\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-kube-api-access-5v8tz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432209 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432267 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432358 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432410 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432521 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432595 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.432649 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.533722 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.533790 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.533852 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534294 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 
crc kubenswrapper[4636]: I1002 21:58:22.534321 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534367 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534390 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534425 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534468 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534487 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534504 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534519 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v8tz\" (UniqueName: 
\"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-kube-api-access-5v8tz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534540 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.534565 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.538471 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.540307 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.540611 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.540730 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.541625 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.542114 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.546374 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.546545 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.547461 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.547637 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.547861 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.549380 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.553339 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v8tz\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-kube-api-access-5v8tz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: 
\"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.553859 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:22 crc kubenswrapper[4636]: I1002 21:58:22.693834 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:58:23 crc kubenswrapper[4636]: I1002 21:58:23.281148 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf"] Oct 02 21:58:24 crc kubenswrapper[4636]: I1002 21:58:24.252869 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" event={"ID":"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0","Type":"ContainerStarted","Data":"314b794e7f42c1ab6cec2458a9f96ba1509d9c29b1a75563fcb239f3c2fc5206"} Oct 02 21:58:24 crc kubenswrapper[4636]: I1002 21:58:24.253425 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" event={"ID":"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0","Type":"ContainerStarted","Data":"6dbdc9029749c4bb19a75e39bee0f9fe8a767f161f29ece6d8d2814b677e1233"} Oct 02 21:58:24 crc kubenswrapper[4636]: I1002 21:58:24.288442 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" podStartSLOduration=2.112362255 podStartE2EDuration="2.28841889s" podCreationTimestamp="2025-10-02 21:58:22 +0000 UTC" firstStartedPulling="2025-10-02 21:58:23.304632555 +0000 UTC m=+2094.627640594" lastFinishedPulling="2025-10-02 21:58:23.48068919 +0000 UTC m=+2094.803697229" observedRunningTime="2025-10-02 21:58:24.2744689 +0000 UTC m=+2095.597476919" watchObservedRunningTime="2025-10-02 21:58:24.28841889 +0000 UTC m=+2095.611426909" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.529871 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-996gp"] Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.533127 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.596046 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-996gp"] Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.686680 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-catalog-content\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.686979 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-utilities\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.687187 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqcrl\" (UniqueName: \"kubernetes.io/projected/b8e39dfe-39a0-4105-83da-e1b0770728f6-kube-api-access-pqcrl\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.788415 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-catalog-content\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.788590 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-utilities\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.788692 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqcrl\" (UniqueName: \"kubernetes.io/projected/b8e39dfe-39a0-4105-83da-e1b0770728f6-kube-api-access-pqcrl\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.789197 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-catalog-content\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.789288 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-utilities\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.815902 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pqcrl\" (UniqueName: \"kubernetes.io/projected/b8e39dfe-39a0-4105-83da-e1b0770728f6-kube-api-access-pqcrl\") pod \"redhat-operators-996gp\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:33 crc kubenswrapper[4636]: I1002 21:58:33.868052 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:34 crc kubenswrapper[4636]: I1002 21:58:34.336188 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-996gp"] Oct 02 21:58:34 crc kubenswrapper[4636]: I1002 21:58:34.373169 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerStarted","Data":"d370b8926f3d751bc2391702bb340526ea8956f463b79b2545478865b6b53464"} Oct 02 21:58:35 crc kubenswrapper[4636]: I1002 21:58:35.385077 4636 generic.go:334] "Generic (PLEG): container finished" podID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerID="ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e" exitCode=0 Oct 02 21:58:35 crc kubenswrapper[4636]: I1002 21:58:35.385239 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerDied","Data":"ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e"} Oct 02 21:58:37 crc kubenswrapper[4636]: I1002 21:58:37.411189 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerStarted","Data":"8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9"} Oct 02 21:58:40 crc kubenswrapper[4636]: I1002 21:58:40.455546 4636 generic.go:334] "Generic (PLEG): container finished" podID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerID="8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9" exitCode=0 Oct 02 21:58:40 crc kubenswrapper[4636]: I1002 21:58:40.455617 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerDied","Data":"8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9"} Oct 02 21:58:41 crc kubenswrapper[4636]: I1002 21:58:41.465675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerStarted","Data":"df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765"} Oct 02 21:58:41 crc kubenswrapper[4636]: I1002 21:58:41.484996 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-996gp" podStartSLOduration=2.900716559 podStartE2EDuration="8.484981062s" podCreationTimestamp="2025-10-02 21:58:33 +0000 UTC" firstStartedPulling="2025-10-02 21:58:35.39199053 +0000 UTC m=+2106.714998549" lastFinishedPulling="2025-10-02 21:58:40.976255033 +0000 UTC m=+2112.299263052" observedRunningTime="2025-10-02 21:58:41.482231469 +0000 UTC m=+2112.805239498" watchObservedRunningTime="2025-10-02 21:58:41.484981062 +0000 UTC m=+2112.807989081" Oct 02 21:58:43 crc kubenswrapper[4636]: I1002 21:58:43.869876 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 
21:58:43 crc kubenswrapper[4636]: I1002 21:58:43.870179 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:58:44 crc kubenswrapper[4636]: I1002 21:58:44.917827 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-996gp" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="registry-server" probeResult="failure" output=< Oct 02 21:58:44 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 21:58:44 crc kubenswrapper[4636]: > Oct 02 21:58:54 crc kubenswrapper[4636]: I1002 21:58:54.927574 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-996gp" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="registry-server" probeResult="failure" output=< Oct 02 21:58:54 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 21:58:54 crc kubenswrapper[4636]: > Oct 02 21:59:03 crc kubenswrapper[4636]: I1002 21:59:03.929761 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:59:03 crc kubenswrapper[4636]: I1002 21:59:03.978651 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:59:04 crc kubenswrapper[4636]: I1002 21:59:04.728906 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-996gp"] Oct 02 21:59:05 crc kubenswrapper[4636]: I1002 21:59:05.677107 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-996gp" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="registry-server" containerID="cri-o://df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765" gracePeriod=2 Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.084480 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.278457 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqcrl\" (UniqueName: \"kubernetes.io/projected/b8e39dfe-39a0-4105-83da-e1b0770728f6-kube-api-access-pqcrl\") pod \"b8e39dfe-39a0-4105-83da-e1b0770728f6\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.279552 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-catalog-content\") pod \"b8e39dfe-39a0-4105-83da-e1b0770728f6\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.279587 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-utilities\") pod \"b8e39dfe-39a0-4105-83da-e1b0770728f6\" (UID: \"b8e39dfe-39a0-4105-83da-e1b0770728f6\") " Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.280237 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-utilities" (OuterVolumeSpecName: "utilities") pod "b8e39dfe-39a0-4105-83da-e1b0770728f6" (UID: "b8e39dfe-39a0-4105-83da-e1b0770728f6"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.285628 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e39dfe-39a0-4105-83da-e1b0770728f6-kube-api-access-pqcrl" (OuterVolumeSpecName: "kube-api-access-pqcrl") pod "b8e39dfe-39a0-4105-83da-e1b0770728f6" (UID: "b8e39dfe-39a0-4105-83da-e1b0770728f6"). InnerVolumeSpecName "kube-api-access-pqcrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.364289 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8e39dfe-39a0-4105-83da-e1b0770728f6" (UID: "b8e39dfe-39a0-4105-83da-e1b0770728f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.381630 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqcrl\" (UniqueName: \"kubernetes.io/projected/b8e39dfe-39a0-4105-83da-e1b0770728f6-kube-api-access-pqcrl\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.381667 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.381680 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e39dfe-39a0-4105-83da-e1b0770728f6-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.692153 4636 generic.go:334] "Generic (PLEG): container finished" podID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerID="df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765" exitCode=0 Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.692208 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-996gp" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.692272 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerDied","Data":"df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765"} Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.692321 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-996gp" event={"ID":"b8e39dfe-39a0-4105-83da-e1b0770728f6","Type":"ContainerDied","Data":"d370b8926f3d751bc2391702bb340526ea8956f463b79b2545478865b6b53464"} Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.692356 4636 scope.go:117] "RemoveContainer" containerID="df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.743200 4636 scope.go:117] "RemoveContainer" containerID="8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.743381 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-996gp"] Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.749947 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-996gp"] Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.792321 4636 scope.go:117] "RemoveContainer" containerID="ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.839393 4636 scope.go:117] "RemoveContainer" containerID="df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765" Oct 02 21:59:06 crc kubenswrapper[4636]: E1002 21:59:06.841292 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765\": container with ID starting with df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765 not found: ID does not exist" containerID="df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.841355 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765"} err="failed to get container status \"df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765\": rpc error: code = NotFound desc = could not find container \"df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765\": container with ID starting with df2f52161674d705fb9a39db651579f500f00b1b4320dc383c977b43c223c765 not found: ID does not exist" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.841393 4636 scope.go:117] "RemoveContainer" containerID="8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9" Oct 02 21:59:06 crc kubenswrapper[4636]: E1002 21:59:06.841771 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9\": container with ID starting with 8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9 not found: ID does not exist" containerID="8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.841871 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9"} err="failed to get container status \"8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9\": rpc error: code = NotFound desc = could not find container \"8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9\": container with ID starting with 8471d2f61f7e8ae950a49d06e5c52c3c2ede3cba74e7f0e92d383e59de79ddc9 not found: ID does not exist" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.841949 4636 scope.go:117] "RemoveContainer" containerID="ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e" Oct 02 21:59:06 crc kubenswrapper[4636]: E1002 21:59:06.842473 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e\": container with ID starting with ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e not found: ID does not exist" containerID="ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e" Oct 02 21:59:06 crc kubenswrapper[4636]: I1002 21:59:06.842518 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e"} err="failed to get container status \"ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e\": rpc error: code = NotFound desc = could not find container \"ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e\": container with ID starting with ee00a7dd0f5b5f5602b3b61e70f4ddd979c6ac34b531d7065dd63f66a581897e not found: ID does not exist" Oct 02 21:59:07 crc kubenswrapper[4636]: I1002 21:59:07.616368 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" path="/var/lib/kubelet/pods/b8e39dfe-39a0-4105-83da-e1b0770728f6/volumes" Oct 02 21:59:07 crc kubenswrapper[4636]: I1002 21:59:07.702280 4636 generic.go:334] "Generic (PLEG): container finished" podID="c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" containerID="314b794e7f42c1ab6cec2458a9f96ba1509d9c29b1a75563fcb239f3c2fc5206" exitCode=0 Oct 02 21:59:07 crc kubenswrapper[4636]: I1002 21:59:07.702340 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" event={"ID":"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0","Type":"ContainerDied","Data":"314b794e7f42c1ab6cec2458a9f96ba1509d9c29b1a75563fcb239f3c2fc5206"} Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.180880 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265672 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ssh-key\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265718 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265778 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-libvirt-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265823 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v8tz\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-kube-api-access-5v8tz\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265849 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-nova-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265876 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-repo-setup-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265899 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265920 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265939 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-inventory\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265958 4636 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-neutron-metadata-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.265984 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.266023 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ovn-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.266051 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.266067 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-bootstrap-combined-ca-bundle\") pod \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\" (UID: \"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0\") " Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.272196 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.273118 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.273938 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.274594 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.276180 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.276684 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.278148 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.278526 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.278751 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.281069 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-kube-api-access-5v8tz" (OuterVolumeSpecName: "kube-api-access-5v8tz") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "kube-api-access-5v8tz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.290358 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.292572 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.300353 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-inventory" (OuterVolumeSpecName: "inventory") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.325657 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" (UID: "c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368002 4636 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368045 4636 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368063 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368079 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368096 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368110 4636 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368122 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368135 4636 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368147 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368158 4636 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368169 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368180 4636 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368220 4636 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.368233 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v8tz\" (UniqueName: \"kubernetes.io/projected/c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0-kube-api-access-5v8tz\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.746172 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" event={"ID":"c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0","Type":"ContainerDied","Data":"6dbdc9029749c4bb19a75e39bee0f9fe8a767f161f29ece6d8d2814b677e1233"} Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.746215 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dbdc9029749c4bb19a75e39bee0f9fe8a767f161f29ece6d8d2814b677e1233" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.746283 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.834565 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj"] Oct 02 21:59:09 crc kubenswrapper[4636]: E1002 21:59:09.834919 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="extract-utilities" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.834930 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="extract-utilities" Oct 02 21:59:09 crc kubenswrapper[4636]: E1002 21:59:09.834943 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.834950 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 02 21:59:09 crc kubenswrapper[4636]: E1002 21:59:09.834975 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="extract-content" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.834980 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="extract-content" Oct 02 21:59:09 crc kubenswrapper[4636]: E1002 21:59:09.835000 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="registry-server" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.835006 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="registry-server" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.835164 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 02 
21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.835184 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e39dfe-39a0-4105-83da-e1b0770728f6" containerName="registry-server" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.835781 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.838450 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.838786 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.838860 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.839090 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.842735 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.877860 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.877930 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.877995 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.878035 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kx4l\" (UniqueName: \"kubernetes.io/projected/681a69d8-8e31-468a-be24-c8ad9db1b0f6-kube-api-access-6kx4l\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.878068 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 
21:59:09.888751 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj"] Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.979740 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.979872 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.979937 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.979975 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kx4l\" (UniqueName: \"kubernetes.io/projected/681a69d8-8e31-468a-be24-c8ad9db1b0f6-kube-api-access-6kx4l\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.980011 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.980683 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.984571 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.984810 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 
21:59:09.985440 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:09 crc kubenswrapper[4636]: I1002 21:59:09.996966 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kx4l\" (UniqueName: \"kubernetes.io/projected/681a69d8-8e31-468a-be24-c8ad9db1b0f6-kube-api-access-6kx4l\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jg4dj\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:10 crc kubenswrapper[4636]: I1002 21:59:10.153877 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 21:59:10 crc kubenswrapper[4636]: I1002 21:59:10.710568 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj"] Oct 02 21:59:10 crc kubenswrapper[4636]: I1002 21:59:10.758533 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" event={"ID":"681a69d8-8e31-468a-be24-c8ad9db1b0f6","Type":"ContainerStarted","Data":"ad7eaafe904bf065ca743ef90b3aa09280e140e2e0f17a963c590d5b8b6a50f0"} Oct 02 21:59:11 crc kubenswrapper[4636]: I1002 21:59:11.771973 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" event={"ID":"681a69d8-8e31-468a-be24-c8ad9db1b0f6","Type":"ContainerStarted","Data":"fa75eaf4ce2bec9ff7f9bec88bd60dce1665fde3e0a7d9010089164d77def28f"} Oct 02 21:59:11 crc kubenswrapper[4636]: I1002 21:59:11.806357 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" podStartSLOduration=2.6541191509999997 podStartE2EDuration="2.806327897s" podCreationTimestamp="2025-10-02 21:59:09 +0000 UTC" firstStartedPulling="2025-10-02 21:59:10.715127317 +0000 UTC m=+2142.038135356" lastFinishedPulling="2025-10-02 21:59:10.867336083 +0000 UTC m=+2142.190344102" observedRunningTime="2025-10-02 21:59:11.802475094 +0000 UTC m=+2143.125483153" watchObservedRunningTime="2025-10-02 21:59:11.806327897 +0000 UTC m=+2143.129335956" Oct 02 21:59:23 crc kubenswrapper[4636]: I1002 21:59:23.117816 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:59:23 crc kubenswrapper[4636]: I1002 21:59:23.118429 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.025275 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lxcfp"] Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.028285 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.063834 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lxcfp"] Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.190396 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-utilities\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.190688 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9txrv\" (UniqueName: \"kubernetes.io/projected/985f229f-2156-40a2-ab21-f1c108b12322-kube-api-access-9txrv\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.190935 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-catalog-content\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.292185 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-utilities\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.292228 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txrv\" (UniqueName: \"kubernetes.io/projected/985f229f-2156-40a2-ab21-f1c108b12322-kube-api-access-9txrv\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.292337 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-catalog-content\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.292710 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-utilities\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.292787 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-catalog-content\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.317870 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9txrv\" (UniqueName: \"kubernetes.io/projected/985f229f-2156-40a2-ab21-f1c108b12322-kube-api-access-9txrv\") pod \"community-operators-lxcfp\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.346253 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:42 crc kubenswrapper[4636]: I1002 21:59:42.905781 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lxcfp"] Oct 02 21:59:43 crc kubenswrapper[4636]: I1002 21:59:43.089488 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerStarted","Data":"4a9f2af931e3e11ebd115ddb2ed8db2e4836f3f5242cd45355688f596458af6b"} Oct 02 21:59:43 crc kubenswrapper[4636]: E1002 21:59:43.338760 4636 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod985f229f_2156_40a2_ab21_f1c108b12322.slice/crio-conmon-37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod985f229f_2156_40a2_ab21_f1c108b12322.slice/crio-37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736.scope\": RecentStats: unable to find data in memory cache]" Oct 02 21:59:44 crc kubenswrapper[4636]: I1002 21:59:44.098233 4636 generic.go:334] "Generic (PLEG): container finished" podID="985f229f-2156-40a2-ab21-f1c108b12322" containerID="37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736" exitCode=0 Oct 02 21:59:44 crc kubenswrapper[4636]: I1002 21:59:44.098290 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerDied","Data":"37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736"} Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.431980 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wgzxf"] Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.434710 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.447256 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgzxf"] Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.555168 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpvls\" (UniqueName: \"kubernetes.io/projected/ce63bc0b-bd13-4118-8a3e-856682323e23-kube-api-access-bpvls\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.555230 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-catalog-content\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.555567 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-utilities\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.657763 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-utilities\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.657862 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpvls\" (UniqueName: \"kubernetes.io/projected/ce63bc0b-bd13-4118-8a3e-856682323e23-kube-api-access-bpvls\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.657911 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-catalog-content\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.658394 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-catalog-content\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.658410 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-utilities\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.690623 4636 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bpvls\" (UniqueName: \"kubernetes.io/projected/ce63bc0b-bd13-4118-8a3e-856682323e23-kube-api-access-bpvls\") pod \"redhat-marketplace-wgzxf\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:45 crc kubenswrapper[4636]: I1002 21:59:45.754551 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:46 crc kubenswrapper[4636]: I1002 21:59:46.126568 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerStarted","Data":"813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db"} Oct 02 21:59:46 crc kubenswrapper[4636]: I1002 21:59:46.229189 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgzxf"] Oct 02 21:59:46 crc kubenswrapper[4636]: W1002 21:59:46.234494 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce63bc0b_bd13_4118_8a3e_856682323e23.slice/crio-acd8bda5b51be1edf1e5a8019d88cda782647717f3358386cc1c498998927d5a WatchSource:0}: Error finding container acd8bda5b51be1edf1e5a8019d88cda782647717f3358386cc1c498998927d5a: Status 404 returned error can't find the container with id acd8bda5b51be1edf1e5a8019d88cda782647717f3358386cc1c498998927d5a Oct 02 21:59:47 crc kubenswrapper[4636]: I1002 21:59:47.136829 4636 generic.go:334] "Generic (PLEG): container finished" podID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerID="872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824" exitCode=0 Oct 02 21:59:47 crc kubenswrapper[4636]: I1002 21:59:47.136940 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerDied","Data":"872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824"} Oct 02 21:59:47 crc kubenswrapper[4636]: I1002 21:59:47.137241 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerStarted","Data":"acd8bda5b51be1edf1e5a8019d88cda782647717f3358386cc1c498998927d5a"} Oct 02 21:59:47 crc kubenswrapper[4636]: I1002 21:59:47.139688 4636 generic.go:334] "Generic (PLEG): container finished" podID="985f229f-2156-40a2-ab21-f1c108b12322" containerID="813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db" exitCode=0 Oct 02 21:59:47 crc kubenswrapper[4636]: I1002 21:59:47.139739 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerDied","Data":"813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db"} Oct 02 21:59:48 crc kubenswrapper[4636]: I1002 21:59:48.156057 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerStarted","Data":"f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9"} Oct 02 21:59:48 crc kubenswrapper[4636]: I1002 21:59:48.159373 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" 
event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerStarted","Data":"6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919"} Oct 02 21:59:48 crc kubenswrapper[4636]: I1002 21:59:48.204662 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lxcfp" podStartSLOduration=2.697179522 podStartE2EDuration="6.204643375s" podCreationTimestamp="2025-10-02 21:59:42 +0000 UTC" firstStartedPulling="2025-10-02 21:59:44.100072573 +0000 UTC m=+2175.423080592" lastFinishedPulling="2025-10-02 21:59:47.607536416 +0000 UTC m=+2178.930544445" observedRunningTime="2025-10-02 21:59:48.198937323 +0000 UTC m=+2179.521945342" watchObservedRunningTime="2025-10-02 21:59:48.204643375 +0000 UTC m=+2179.527651404" Oct 02 21:59:49 crc kubenswrapper[4636]: I1002 21:59:49.168829 4636 generic.go:334] "Generic (PLEG): container finished" podID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerID="f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9" exitCode=0 Oct 02 21:59:49 crc kubenswrapper[4636]: I1002 21:59:49.169588 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerDied","Data":"f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9"} Oct 02 21:59:50 crc kubenswrapper[4636]: I1002 21:59:50.179593 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerStarted","Data":"ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983"} Oct 02 21:59:50 crc kubenswrapper[4636]: I1002 21:59:50.195928 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wgzxf" podStartSLOduration=2.721659302 podStartE2EDuration="5.195912197s" podCreationTimestamp="2025-10-02 21:59:45 +0000 UTC" firstStartedPulling="2025-10-02 21:59:47.13865195 +0000 UTC m=+2178.461659969" lastFinishedPulling="2025-10-02 21:59:49.612904835 +0000 UTC m=+2180.935912864" observedRunningTime="2025-10-02 21:59:50.193928554 +0000 UTC m=+2181.516936583" watchObservedRunningTime="2025-10-02 21:59:50.195912197 +0000 UTC m=+2181.518920216" Oct 02 21:59:52 crc kubenswrapper[4636]: I1002 21:59:52.347597 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:52 crc kubenswrapper[4636]: I1002 21:59:52.347641 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:52 crc kubenswrapper[4636]: I1002 21:59:52.419903 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:53 crc kubenswrapper[4636]: I1002 21:59:53.117377 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 21:59:53 crc kubenswrapper[4636]: I1002 21:59:53.117449 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 21:59:53 crc kubenswrapper[4636]: I1002 21:59:53.278674 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:54 crc kubenswrapper[4636]: I1002 21:59:54.416914 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lxcfp"] Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.222379 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lxcfp" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="registry-server" containerID="cri-o://6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919" gracePeriod=2 Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.678237 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.755757 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.755795 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.769002 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9txrv\" (UniqueName: \"kubernetes.io/projected/985f229f-2156-40a2-ab21-f1c108b12322-kube-api-access-9txrv\") pod \"985f229f-2156-40a2-ab21-f1c108b12322\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.769433 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-catalog-content\") pod \"985f229f-2156-40a2-ab21-f1c108b12322\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.769503 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-utilities\") pod \"985f229f-2156-40a2-ab21-f1c108b12322\" (UID: \"985f229f-2156-40a2-ab21-f1c108b12322\") " Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.771046 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-utilities" (OuterVolumeSpecName: "utilities") pod "985f229f-2156-40a2-ab21-f1c108b12322" (UID: "985f229f-2156-40a2-ab21-f1c108b12322"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.777221 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985f229f-2156-40a2-ab21-f1c108b12322-kube-api-access-9txrv" (OuterVolumeSpecName: "kube-api-access-9txrv") pod "985f229f-2156-40a2-ab21-f1c108b12322" (UID: "985f229f-2156-40a2-ab21-f1c108b12322"). InnerVolumeSpecName "kube-api-access-9txrv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.811042 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.820469 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "985f229f-2156-40a2-ab21-f1c108b12322" (UID: "985f229f-2156-40a2-ab21-f1c108b12322"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.871905 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9txrv\" (UniqueName: \"kubernetes.io/projected/985f229f-2156-40a2-ab21-f1c108b12322-kube-api-access-9txrv\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.871952 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:55 crc kubenswrapper[4636]: I1002 21:59:55.871962 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/985f229f-2156-40a2-ab21-f1c108b12322-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.231095 4636 generic.go:334] "Generic (PLEG): container finished" podID="985f229f-2156-40a2-ab21-f1c108b12322" containerID="6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919" exitCode=0 Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.231915 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerDied","Data":"6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919"} Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.231970 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lxcfp" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.231999 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxcfp" event={"ID":"985f229f-2156-40a2-ab21-f1c108b12322","Type":"ContainerDied","Data":"4a9f2af931e3e11ebd115ddb2ed8db2e4836f3f5242cd45355688f596458af6b"} Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.232026 4636 scope.go:117] "RemoveContainer" containerID="6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.267269 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lxcfp"] Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.269584 4636 scope.go:117] "RemoveContainer" containerID="813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.277127 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lxcfp"] Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.296034 4636 scope.go:117] "RemoveContainer" containerID="37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.300908 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.343244 4636 scope.go:117] "RemoveContainer" containerID="6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919" Oct 02 21:59:56 crc kubenswrapper[4636]: E1002 21:59:56.343733 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919\": container with ID starting with 6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919 not found: ID does not exist" containerID="6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.343852 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919"} err="failed to get container status \"6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919\": rpc error: code = NotFound desc = could not find container \"6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919\": container with ID starting with 6aa415d0eaf2b947f55e293546ad5583091c3654fa80640417fd50f6def1b919 not found: ID does not exist" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.343942 4636 scope.go:117] "RemoveContainer" containerID="813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db" Oct 02 21:59:56 crc kubenswrapper[4636]: E1002 21:59:56.344478 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db\": container with ID starting with 813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db not found: ID does not exist" containerID="813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.344512 4636 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db"} err="failed to get container status \"813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db\": rpc error: code = NotFound desc = could not find container \"813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db\": container with ID starting with 813f0bdf73f65988f954336012ab8122916b19e8c038b7b7b968015f384e26db not found: ID does not exist" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.344535 4636 scope.go:117] "RemoveContainer" containerID="37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736" Oct 02 21:59:56 crc kubenswrapper[4636]: E1002 21:59:56.344812 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736\": container with ID starting with 37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736 not found: ID does not exist" containerID="37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736" Oct 02 21:59:56 crc kubenswrapper[4636]: I1002 21:59:56.344887 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736"} err="failed to get container status \"37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736\": rpc error: code = NotFound desc = could not find container \"37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736\": container with ID starting with 37444f4e86ac688ce706d94d712f008da9b8d5b8d5e8c81da299ea9a6588b736 not found: ID does not exist" Oct 02 21:59:57 crc kubenswrapper[4636]: I1002 21:59:57.613015 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="985f229f-2156-40a2-ab21-f1c108b12322" path="/var/lib/kubelet/pods/985f229f-2156-40a2-ab21-f1c108b12322/volumes" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.216261 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgzxf"] Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.216900 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wgzxf" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="registry-server" containerID="cri-o://ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983" gracePeriod=2 Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.659050 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.760103 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-utilities\") pod \"ce63bc0b-bd13-4118-8a3e-856682323e23\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.760510 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpvls\" (UniqueName: \"kubernetes.io/projected/ce63bc0b-bd13-4118-8a3e-856682323e23-kube-api-access-bpvls\") pod \"ce63bc0b-bd13-4118-8a3e-856682323e23\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.760854 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-catalog-content\") pod \"ce63bc0b-bd13-4118-8a3e-856682323e23\" (UID: \"ce63bc0b-bd13-4118-8a3e-856682323e23\") " Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.761245 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-utilities" (OuterVolumeSpecName: "utilities") pod "ce63bc0b-bd13-4118-8a3e-856682323e23" (UID: "ce63bc0b-bd13-4118-8a3e-856682323e23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.761938 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.767147 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce63bc0b-bd13-4118-8a3e-856682323e23-kube-api-access-bpvls" (OuterVolumeSpecName: "kube-api-access-bpvls") pod "ce63bc0b-bd13-4118-8a3e-856682323e23" (UID: "ce63bc0b-bd13-4118-8a3e-856682323e23"). InnerVolumeSpecName "kube-api-access-bpvls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.798601 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce63bc0b-bd13-4118-8a3e-856682323e23" (UID: "ce63bc0b-bd13-4118-8a3e-856682323e23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.864918 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce63bc0b-bd13-4118-8a3e-856682323e23-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 21:59:59 crc kubenswrapper[4636]: I1002 21:59:59.864959 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpvls\" (UniqueName: \"kubernetes.io/projected/ce63bc0b-bd13-4118-8a3e-856682323e23-kube-api-access-bpvls\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.159819 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb"] Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.160307 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="extract-utilities" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160328 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="extract-utilities" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.160348 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="registry-server" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160357 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="registry-server" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.160388 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="extract-utilities" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160396 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="extract-utilities" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.160407 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="extract-content" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160418 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="extract-content" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.160432 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="extract-content" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160442 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="extract-content" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.160465 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="registry-server" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160472 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="registry-server" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160721 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerName="registry-server" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.160762 4636 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="985f229f-2156-40a2-ab21-f1c108b12322" containerName="registry-server" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.161459 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.163304 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.164482 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.170184 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb"] Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.270803 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-config-volume\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.271133 4636 generic.go:334] "Generic (PLEG): container finished" podID="ce63bc0b-bd13-4118-8a3e-856682323e23" containerID="ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983" exitCode=0 Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.271159 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerDied","Data":"ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983"} Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.271716 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wgzxf" event={"ID":"ce63bc0b-bd13-4118-8a3e-856682323e23","Type":"ContainerDied","Data":"acd8bda5b51be1edf1e5a8019d88cda782647717f3358386cc1c498998927d5a"} Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.271734 4636 scope.go:117] "RemoveContainer" containerID="ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.271180 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wgzxf" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.272074 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frt42\" (UniqueName: \"kubernetes.io/projected/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-kube-api-access-frt42\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.272221 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-secret-volume\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.291102 4636 scope.go:117] "RemoveContainer" containerID="f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.308867 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgzxf"] Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.316310 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wgzxf"] Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.335129 4636 scope.go:117] "RemoveContainer" containerID="872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.357313 4636 scope.go:117] "RemoveContainer" containerID="ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.357760 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983\": container with ID starting with ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983 not found: ID does not exist" containerID="ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.357792 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983"} err="failed to get container status \"ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983\": rpc error: code = NotFound desc = could not find container \"ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983\": container with ID starting with ec9441329e22c2cecf98fe0a5ccda47c03fdf61a2a3203fe7376542f04673983 not found: ID does not exist" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.357814 4636 scope.go:117] "RemoveContainer" containerID="f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.358016 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9\": container with ID starting with f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9 not found: ID does not exist" containerID="f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9" 
Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.358081 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9"} err="failed to get container status \"f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9\": rpc error: code = NotFound desc = could not find container \"f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9\": container with ID starting with f377b8a37f7684c27f19676127e19d676e8beeb90a9d84df6ef86425cbf21dd9 not found: ID does not exist" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.358098 4636 scope.go:117] "RemoveContainer" containerID="872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824" Oct 02 22:00:00 crc kubenswrapper[4636]: E1002 22:00:00.364069 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824\": container with ID starting with 872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824 not found: ID does not exist" containerID="872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.364113 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824"} err="failed to get container status \"872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824\": rpc error: code = NotFound desc = could not find container \"872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824\": container with ID starting with 872896d155177d2ffe0855a342179e8c944a05a457872f98fb0a7c84a1c7d824 not found: ID does not exist" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.374015 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-config-volume\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.374159 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frt42\" (UniqueName: \"kubernetes.io/projected/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-kube-api-access-frt42\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.374190 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-secret-volume\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.374887 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-config-volume\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc 
kubenswrapper[4636]: I1002 22:00:00.387649 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-secret-volume\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.392984 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frt42\" (UniqueName: \"kubernetes.io/projected/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-kube-api-access-frt42\") pod \"collect-profiles-29324040-x8sbb\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:00 crc kubenswrapper[4636]: I1002 22:00:00.479741 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:01 crc kubenswrapper[4636]: I1002 22:00:01.018004 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb"] Oct 02 22:00:01 crc kubenswrapper[4636]: I1002 22:00:01.281534 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" event={"ID":"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9","Type":"ContainerStarted","Data":"fc08f0071358ca188acc2a17eb6c0ea67aaca171c2f7a5e53545687bb98fc830"} Oct 02 22:00:01 crc kubenswrapper[4636]: I1002 22:00:01.281584 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" event={"ID":"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9","Type":"ContainerStarted","Data":"e6e94730332da8de0497b0b39f8308dc7489c0c1fc0a1343ef85ca6221c1edbf"} Oct 02 22:00:01 crc kubenswrapper[4636]: I1002 22:00:01.312763 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" podStartSLOduration=1.31273079 podStartE2EDuration="1.31273079s" podCreationTimestamp="2025-10-02 22:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 22:00:01.310379017 +0000 UTC m=+2192.633387046" watchObservedRunningTime="2025-10-02 22:00:01.31273079 +0000 UTC m=+2192.635738809" Oct 02 22:00:01 crc kubenswrapper[4636]: I1002 22:00:01.615242 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce63bc0b-bd13-4118-8a3e-856682323e23" path="/var/lib/kubelet/pods/ce63bc0b-bd13-4118-8a3e-856682323e23/volumes" Oct 02 22:00:02 crc kubenswrapper[4636]: I1002 22:00:02.300074 4636 generic.go:334] "Generic (PLEG): container finished" podID="4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" containerID="fc08f0071358ca188acc2a17eb6c0ea67aaca171c2f7a5e53545687bb98fc830" exitCode=0 Oct 02 22:00:02 crc kubenswrapper[4636]: I1002 22:00:02.300137 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" event={"ID":"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9","Type":"ContainerDied","Data":"fc08f0071358ca188acc2a17eb6c0ea67aaca171c2f7a5e53545687bb98fc830"} Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.649841 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.837470 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-secret-volume\") pod \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.837876 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-config-volume\") pod \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.837903 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frt42\" (UniqueName: \"kubernetes.io/projected/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-kube-api-access-frt42\") pod \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\" (UID: \"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9\") " Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.838526 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-config-volume" (OuterVolumeSpecName: "config-volume") pod "4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" (UID: "4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.839269 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.844147 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" (UID: "4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.856143 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-kube-api-access-frt42" (OuterVolumeSpecName: "kube-api-access-frt42") pod "4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" (UID: "4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9"). InnerVolumeSpecName "kube-api-access-frt42". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.940766 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:03 crc kubenswrapper[4636]: I1002 22:00:03.940808 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frt42\" (UniqueName: \"kubernetes.io/projected/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9-kube-api-access-frt42\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:04 crc kubenswrapper[4636]: I1002 22:00:04.320395 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" event={"ID":"4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9","Type":"ContainerDied","Data":"e6e94730332da8de0497b0b39f8308dc7489c0c1fc0a1343ef85ca6221c1edbf"} Oct 02 22:00:04 crc kubenswrapper[4636]: I1002 22:00:04.320719 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6e94730332da8de0497b0b39f8308dc7489c0c1fc0a1343ef85ca6221c1edbf" Oct 02 22:00:04 crc kubenswrapper[4636]: I1002 22:00:04.320899 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb" Oct 02 22:00:04 crc kubenswrapper[4636]: I1002 22:00:04.381961 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"] Oct 02 22:00:04 crc kubenswrapper[4636]: I1002 22:00:04.391637 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323995-msb78"] Oct 02 22:00:05 crc kubenswrapper[4636]: I1002 22:00:05.615581 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c7843cb-a7a2-4db4-b244-f88476448291" path="/var/lib/kubelet/pods/3c7843cb-a7a2-4db4-b244-f88476448291/volumes" Oct 02 22:00:05 crc kubenswrapper[4636]: I1002 22:00:05.857228 4636 scope.go:117] "RemoveContainer" containerID="31365e2b97feab8dab1ff10cffd56c074b257ed7b231468afb2391c66cc4ec0c" Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.117532 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.118299 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.118367 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.119235 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.119335 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" gracePeriod=600 Oct 02 22:00:23 crc kubenswrapper[4636]: E1002 22:00:23.257128 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.510228 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" exitCode=0 Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.510289 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35"} Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.510355 4636 scope.go:117] "RemoveContainer" containerID="c6da1ee0722f89363964853239734f7c0beeeafaa1f6842cc16f9a803dfcc9e2" Oct 02 22:00:23 crc kubenswrapper[4636]: I1002 22:00:23.511007 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:00:23 crc kubenswrapper[4636]: E1002 22:00:23.511266 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:00:24 crc kubenswrapper[4636]: I1002 22:00:24.519556 4636 generic.go:334] "Generic (PLEG): container finished" podID="681a69d8-8e31-468a-be24-c8ad9db1b0f6" containerID="fa75eaf4ce2bec9ff7f9bec88bd60dce1665fde3e0a7d9010089164d77def28f" exitCode=0 Oct 02 22:00:24 crc kubenswrapper[4636]: I1002 22:00:24.519643 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" event={"ID":"681a69d8-8e31-468a-be24-c8ad9db1b0f6","Type":"ContainerDied","Data":"fa75eaf4ce2bec9ff7f9bec88bd60dce1665fde3e0a7d9010089164d77def28f"} Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.000740 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.165997 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovn-combined-ca-bundle\") pod \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.166123 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ssh-key\") pod \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.166293 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-inventory\") pod \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.166370 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kx4l\" (UniqueName: \"kubernetes.io/projected/681a69d8-8e31-468a-be24-c8ad9db1b0f6-kube-api-access-6kx4l\") pod \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.166425 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovncontroller-config-0\") pod \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\" (UID: \"681a69d8-8e31-468a-be24-c8ad9db1b0f6\") " Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.179105 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "681a69d8-8e31-468a-be24-c8ad9db1b0f6" (UID: "681a69d8-8e31-468a-be24-c8ad9db1b0f6"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.181139 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/681a69d8-8e31-468a-be24-c8ad9db1b0f6-kube-api-access-6kx4l" (OuterVolumeSpecName: "kube-api-access-6kx4l") pod "681a69d8-8e31-468a-be24-c8ad9db1b0f6" (UID: "681a69d8-8e31-468a-be24-c8ad9db1b0f6"). InnerVolumeSpecName "kube-api-access-6kx4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.196140 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "681a69d8-8e31-468a-be24-c8ad9db1b0f6" (UID: "681a69d8-8e31-468a-be24-c8ad9db1b0f6"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.197596 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-inventory" (OuterVolumeSpecName: "inventory") pod "681a69d8-8e31-468a-be24-c8ad9db1b0f6" (UID: "681a69d8-8e31-468a-be24-c8ad9db1b0f6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.199427 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "681a69d8-8e31-468a-be24-c8ad9db1b0f6" (UID: "681a69d8-8e31-468a-be24-c8ad9db1b0f6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.268649 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kx4l\" (UniqueName: \"kubernetes.io/projected/681a69d8-8e31-468a-be24-c8ad9db1b0f6-kube-api-access-6kx4l\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.268856 4636 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.268953 4636 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.269010 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.269074 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/681a69d8-8e31-468a-be24-c8ad9db1b0f6-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.588576 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" event={"ID":"681a69d8-8e31-468a-be24-c8ad9db1b0f6","Type":"ContainerDied","Data":"ad7eaafe904bf065ca743ef90b3aa09280e140e2e0f17a963c590d5b8b6a50f0"} Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.588617 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jg4dj" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.588624 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad7eaafe904bf065ca743ef90b3aa09280e140e2e0f17a963c590d5b8b6a50f0" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.651564 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p"] Oct 02 22:00:26 crc kubenswrapper[4636]: E1002 22:00:26.652043 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" containerName="collect-profiles" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.652060 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" containerName="collect-profiles" Oct 02 22:00:26 crc kubenswrapper[4636]: E1002 22:00:26.652092 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a69d8-8e31-468a-be24-c8ad9db1b0f6" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.652099 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a69d8-8e31-468a-be24-c8ad9db1b0f6" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.652265 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="681a69d8-8e31-468a-be24-c8ad9db1b0f6" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.652293 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" containerName="collect-profiles" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.654370 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.657376 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.657551 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.657732 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.657851 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.657951 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.658352 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.664620 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p"] Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.784282 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcbfc\" (UniqueName: \"kubernetes.io/projected/c30b6801-d279-40ae-8edf-a01189809528-kube-api-access-zcbfc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.784411 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.784498 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.784600 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.784643 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.784666 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.886468 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.886562 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.886626 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.886668 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.886918 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcbfc\" (UniqueName: \"kubernetes.io/projected/c30b6801-d279-40ae-8edf-a01189809528-kube-api-access-zcbfc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.886985 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.893277 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.893325 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.893867 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.903599 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.905014 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.909788 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcbfc\" (UniqueName: \"kubernetes.io/projected/c30b6801-d279-40ae-8edf-a01189809528-kube-api-access-zcbfc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:26 crc kubenswrapper[4636]: I1002 22:00:26.981983 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:00:27 crc kubenswrapper[4636]: I1002 22:00:27.519779 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p"] Oct 02 22:00:27 crc kubenswrapper[4636]: I1002 22:00:27.598915 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" event={"ID":"c30b6801-d279-40ae-8edf-a01189809528","Type":"ContainerStarted","Data":"8c7bd3479c50d04483b166d690efa3fb49ae3859dabc6b90e5d4d30ae723a351"} Oct 02 22:00:28 crc kubenswrapper[4636]: I1002 22:00:28.609803 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" event={"ID":"c30b6801-d279-40ae-8edf-a01189809528","Type":"ContainerStarted","Data":"d92490bc6dda743fee760e8e71e03893eba312a43ed129420474d7a876f92f85"} Oct 02 22:00:28 crc kubenswrapper[4636]: I1002 22:00:28.628331 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" podStartSLOduration=2.430558988 podStartE2EDuration="2.628317934s" podCreationTimestamp="2025-10-02 22:00:26 +0000 UTC" firstStartedPulling="2025-10-02 22:00:27.520061037 +0000 UTC m=+2218.843069056" lastFinishedPulling="2025-10-02 22:00:27.717819983 +0000 UTC m=+2219.040828002" observedRunningTime="2025-10-02 22:00:28.627399909 +0000 UTC m=+2219.950407928" watchObservedRunningTime="2025-10-02 22:00:28.628317934 +0000 UTC m=+2219.951325953" Oct 02 22:00:36 crc kubenswrapper[4636]: I1002 22:00:36.603998 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:00:36 crc kubenswrapper[4636]: E1002 22:00:36.605118 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:00:48 crc kubenswrapper[4636]: I1002 22:00:48.603276 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:00:48 crc kubenswrapper[4636]: E1002 22:00:48.604088 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.165139 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29324041-ffnwj"] Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.167163 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.183579 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29324041-ffnwj"] Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.254505 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4wjl\" (UniqueName: \"kubernetes.io/projected/831000fc-4ab3-4609-b42f-f45920de8917-kube-api-access-r4wjl\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.254585 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-fernet-keys\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.254765 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-config-data\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.254952 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-combined-ca-bundle\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.356842 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4wjl\" (UniqueName: \"kubernetes.io/projected/831000fc-4ab3-4609-b42f-f45920de8917-kube-api-access-r4wjl\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.356925 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-fernet-keys\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.357018 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-config-data\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.357102 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-combined-ca-bundle\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.364175 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-fernet-keys\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.364613 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-config-data\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.365450 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-combined-ca-bundle\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.377655 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4wjl\" (UniqueName: \"kubernetes.io/projected/831000fc-4ab3-4609-b42f-f45920de8917-kube-api-access-r4wjl\") pod \"keystone-cron-29324041-ffnwj\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:00 crc kubenswrapper[4636]: I1002 22:01:00.491956 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:01 crc kubenswrapper[4636]: I1002 22:01:01.009508 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29324041-ffnwj"] Oct 02 22:01:01 crc kubenswrapper[4636]: I1002 22:01:01.980816 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324041-ffnwj" event={"ID":"831000fc-4ab3-4609-b42f-f45920de8917","Type":"ContainerStarted","Data":"2c1821177f0995bc287f9d9460de6828880cffd1d9c7ecda3326f52a7bf60b20"} Oct 02 22:01:01 crc kubenswrapper[4636]: I1002 22:01:01.981235 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324041-ffnwj" event={"ID":"831000fc-4ab3-4609-b42f-f45920de8917","Type":"ContainerStarted","Data":"05f2e59aa531b227135bc24ede03f6df840605b4f3da9101d81bc5e84f548329"} Oct 02 22:01:02 crc kubenswrapper[4636]: I1002 22:01:02.005599 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29324041-ffnwj" podStartSLOduration=2.005571464 podStartE2EDuration="2.005571464s" podCreationTimestamp="2025-10-02 22:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 22:01:01.997973751 +0000 UTC m=+2253.320981770" watchObservedRunningTime="2025-10-02 22:01:02.005571464 +0000 UTC m=+2253.328579493" Oct 02 22:01:03 crc kubenswrapper[4636]: I1002 22:01:03.604687 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:01:03 crc kubenswrapper[4636]: E1002 22:01:03.605426 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:01:05 crc kubenswrapper[4636]: I1002 22:01:05.008303 4636 generic.go:334] "Generic (PLEG): container finished" podID="831000fc-4ab3-4609-b42f-f45920de8917" containerID="2c1821177f0995bc287f9d9460de6828880cffd1d9c7ecda3326f52a7bf60b20" exitCode=0 Oct 02 22:01:05 crc kubenswrapper[4636]: I1002 22:01:05.008470 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324041-ffnwj" event={"ID":"831000fc-4ab3-4609-b42f-f45920de8917","Type":"ContainerDied","Data":"2c1821177f0995bc287f9d9460de6828880cffd1d9c7ecda3326f52a7bf60b20"} Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.331549 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.479215 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4wjl\" (UniqueName: \"kubernetes.io/projected/831000fc-4ab3-4609-b42f-f45920de8917-kube-api-access-r4wjl\") pod \"831000fc-4ab3-4609-b42f-f45920de8917\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.479546 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-combined-ca-bundle\") pod \"831000fc-4ab3-4609-b42f-f45920de8917\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.479663 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-config-data\") pod \"831000fc-4ab3-4609-b42f-f45920de8917\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.480045 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-fernet-keys\") pod \"831000fc-4ab3-4609-b42f-f45920de8917\" (UID: \"831000fc-4ab3-4609-b42f-f45920de8917\") " Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.485008 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "831000fc-4ab3-4609-b42f-f45920de8917" (UID: "831000fc-4ab3-4609-b42f-f45920de8917"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.492936 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831000fc-4ab3-4609-b42f-f45920de8917-kube-api-access-r4wjl" (OuterVolumeSpecName: "kube-api-access-r4wjl") pod "831000fc-4ab3-4609-b42f-f45920de8917" (UID: "831000fc-4ab3-4609-b42f-f45920de8917"). InnerVolumeSpecName "kube-api-access-r4wjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.519708 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "831000fc-4ab3-4609-b42f-f45920de8917" (UID: "831000fc-4ab3-4609-b42f-f45920de8917"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.540334 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-config-data" (OuterVolumeSpecName: "config-data") pod "831000fc-4ab3-4609-b42f-f45920de8917" (UID: "831000fc-4ab3-4609-b42f-f45920de8917"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.582566 4636 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.582603 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4wjl\" (UniqueName: \"kubernetes.io/projected/831000fc-4ab3-4609-b42f-f45920de8917-kube-api-access-r4wjl\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.582618 4636 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:06 crc kubenswrapper[4636]: I1002 22:01:06.582632 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/831000fc-4ab3-4609-b42f-f45920de8917-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:07 crc kubenswrapper[4636]: I1002 22:01:07.031082 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324041-ffnwj" event={"ID":"831000fc-4ab3-4609-b42f-f45920de8917","Type":"ContainerDied","Data":"05f2e59aa531b227135bc24ede03f6df840605b4f3da9101d81bc5e84f548329"} Oct 02 22:01:07 crc kubenswrapper[4636]: I1002 22:01:07.031120 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05f2e59aa531b227135bc24ede03f6df840605b4f3da9101d81bc5e84f548329" Oct 02 22:01:07 crc kubenswrapper[4636]: I1002 22:01:07.031133 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29324041-ffnwj" Oct 02 22:01:15 crc kubenswrapper[4636]: I1002 22:01:15.604007 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:01:15 crc kubenswrapper[4636]: E1002 22:01:15.605237 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:01:23 crc kubenswrapper[4636]: I1002 22:01:23.197642 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" event={"ID":"c30b6801-d279-40ae-8edf-a01189809528","Type":"ContainerDied","Data":"d92490bc6dda743fee760e8e71e03893eba312a43ed129420474d7a876f92f85"} Oct 02 22:01:23 crc kubenswrapper[4636]: I1002 22:01:23.197595 4636 generic.go:334] "Generic (PLEG): container finished" podID="c30b6801-d279-40ae-8edf-a01189809528" containerID="d92490bc6dda743fee760e8e71e03893eba312a43ed129420474d7a876f92f85" exitCode=0 Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.647009 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.745543 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-nova-metadata-neutron-config-0\") pod \"c30b6801-d279-40ae-8edf-a01189809528\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.745599 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-ovn-metadata-agent-neutron-config-0\") pod \"c30b6801-d279-40ae-8edf-a01189809528\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.745684 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-ssh-key\") pod \"c30b6801-d279-40ae-8edf-a01189809528\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.745705 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-inventory\") pod \"c30b6801-d279-40ae-8edf-a01189809528\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.745927 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-metadata-combined-ca-bundle\") pod \"c30b6801-d279-40ae-8edf-a01189809528\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.745949 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-zcbfc\" (UniqueName: \"kubernetes.io/projected/c30b6801-d279-40ae-8edf-a01189809528-kube-api-access-zcbfc\") pod \"c30b6801-d279-40ae-8edf-a01189809528\" (UID: \"c30b6801-d279-40ae-8edf-a01189809528\") " Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.751661 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c30b6801-d279-40ae-8edf-a01189809528" (UID: "c30b6801-d279-40ae-8edf-a01189809528"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.753591 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c30b6801-d279-40ae-8edf-a01189809528-kube-api-access-zcbfc" (OuterVolumeSpecName: "kube-api-access-zcbfc") pod "c30b6801-d279-40ae-8edf-a01189809528" (UID: "c30b6801-d279-40ae-8edf-a01189809528"). InnerVolumeSpecName "kube-api-access-zcbfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.798985 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "c30b6801-d279-40ae-8edf-a01189809528" (UID: "c30b6801-d279-40ae-8edf-a01189809528"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.799450 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "c30b6801-d279-40ae-8edf-a01189809528" (UID: "c30b6801-d279-40ae-8edf-a01189809528"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.812434 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c30b6801-d279-40ae-8edf-a01189809528" (UID: "c30b6801-d279-40ae-8edf-a01189809528"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.816220 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-inventory" (OuterVolumeSpecName: "inventory") pod "c30b6801-d279-40ae-8edf-a01189809528" (UID: "c30b6801-d279-40ae-8edf-a01189809528"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.856891 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcbfc\" (UniqueName: \"kubernetes.io/projected/c30b6801-d279-40ae-8edf-a01189809528-kube-api-access-zcbfc\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.856927 4636 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.856938 4636 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.856949 4636 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.856966 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:24 crc kubenswrapper[4636]: I1002 22:01:24.858281 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30b6801-d279-40ae-8edf-a01189809528-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.216794 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" event={"ID":"c30b6801-d279-40ae-8edf-a01189809528","Type":"ContainerDied","Data":"8c7bd3479c50d04483b166d690efa3fb49ae3859dabc6b90e5d4d30ae723a351"} Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.216852 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c7bd3479c50d04483b166d690efa3fb49ae3859dabc6b90e5d4d30ae723a351" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.216849 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.301703 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt"] Oct 02 22:01:25 crc kubenswrapper[4636]: E1002 22:01:25.302322 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30b6801-d279-40ae-8edf-a01189809528" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.302339 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30b6801-d279-40ae-8edf-a01189809528" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 02 22:01:25 crc kubenswrapper[4636]: E1002 22:01:25.302350 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831000fc-4ab3-4609-b42f-f45920de8917" containerName="keystone-cron" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.302356 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="831000fc-4ab3-4609-b42f-f45920de8917" containerName="keystone-cron" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.302529 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30b6801-d279-40ae-8edf-a01189809528" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.302560 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="831000fc-4ab3-4609-b42f-f45920de8917" containerName="keystone-cron" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.303154 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.305353 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.306374 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.305379 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.308122 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.308473 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.357201 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt"] Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.469796 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.469857 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.469881 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.470248 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbwlp\" (UniqueName: \"kubernetes.io/projected/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-kube-api-access-sbwlp\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.470308 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.572514 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.572568 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.572588 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.572685 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbwlp\" (UniqueName: \"kubernetes.io/projected/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-kube-api-access-sbwlp\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.572702 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.578267 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.578412 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.578414 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.586426 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.594576 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbwlp\" (UniqueName: \"kubernetes.io/projected/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-kube-api-access-sbwlp\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.625026 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:01:25 crc kubenswrapper[4636]: I1002 22:01:25.975284 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt"] Oct 02 22:01:26 crc kubenswrapper[4636]: I1002 22:01:26.224933 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" event={"ID":"3e7736a9-7b5a-4d65-ad6e-8814a0a23506","Type":"ContainerStarted","Data":"8b78f08b17de5f775d2959ff5d4e9f2ada6baca3a6dfb73c71670f4ad937f580"} Oct 02 22:01:27 crc kubenswrapper[4636]: I1002 22:01:27.235949 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" event={"ID":"3e7736a9-7b5a-4d65-ad6e-8814a0a23506","Type":"ContainerStarted","Data":"121539802c6f9d4fd712a44c9f83065886eef2cd0c4c6ecacd58daec40e6ab7d"} Oct 02 22:01:27 crc kubenswrapper[4636]: I1002 22:01:27.255442 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" podStartSLOduration=2.116141221 podStartE2EDuration="2.255416629s" podCreationTimestamp="2025-10-02 22:01:25 +0000 UTC" firstStartedPulling="2025-10-02 22:01:25.979334259 +0000 UTC m=+2277.302342278" lastFinishedPulling="2025-10-02 22:01:26.118609667 +0000 UTC m=+2277.441617686" observedRunningTime="2025-10-02 22:01:27.251620857 +0000 UTC m=+2278.574628916" watchObservedRunningTime="2025-10-02 22:01:27.255416629 +0000 UTC m=+2278.578424678" Oct 02 22:01:27 crc kubenswrapper[4636]: I1002 22:01:27.604077 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:01:27 crc kubenswrapper[4636]: E1002 22:01:27.604590 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:01:38 crc kubenswrapper[4636]: I1002 22:01:38.604598 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:01:38 crc kubenswrapper[4636]: E1002 22:01:38.605596 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:01:49 crc kubenswrapper[4636]: I1002 22:01:49.615306 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:01:49 crc kubenswrapper[4636]: E1002 22:01:49.616299 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:02:02 crc kubenswrapper[4636]: I1002 22:02:02.603513 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:02:02 crc kubenswrapper[4636]: E1002 22:02:02.604332 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.075702 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8k8vp"] Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.081514 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.105152 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8k8vp"] Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.191987 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-utilities\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.192052 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-catalog-content\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.192318 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcxwc\" (UniqueName: \"kubernetes.io/projected/87467519-a8fd-447f-a841-42413dbbd3b2-kube-api-access-xcxwc\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.293462 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-utilities\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.293542 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-catalog-content\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.293608 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcxwc\" (UniqueName: 
\"kubernetes.io/projected/87467519-a8fd-447f-a841-42413dbbd3b2-kube-api-access-xcxwc\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.294042 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-catalog-content\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.294098 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-utilities\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.319636 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcxwc\" (UniqueName: \"kubernetes.io/projected/87467519-a8fd-447f-a841-42413dbbd3b2-kube-api-access-xcxwc\") pod \"certified-operators-8k8vp\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.410189 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:04 crc kubenswrapper[4636]: I1002 22:02:04.950839 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8k8vp"] Oct 02 22:02:05 crc kubenswrapper[4636]: I1002 22:02:05.605726 4636 generic.go:334] "Generic (PLEG): container finished" podID="87467519-a8fd-447f-a841-42413dbbd3b2" containerID="6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8" exitCode=0 Oct 02 22:02:05 crc kubenswrapper[4636]: I1002 22:02:05.610728 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:02:05 crc kubenswrapper[4636]: I1002 22:02:05.616220 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerDied","Data":"6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8"} Oct 02 22:02:05 crc kubenswrapper[4636]: I1002 22:02:05.616246 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerStarted","Data":"06f6138b186f64e39ae937ffa1fa49e7217f60ecef568f151d9a67841e681c45"} Oct 02 22:02:06 crc kubenswrapper[4636]: I1002 22:02:06.617586 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerStarted","Data":"9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6"} Oct 02 22:02:08 crc kubenswrapper[4636]: I1002 22:02:08.641184 4636 generic.go:334] "Generic (PLEG): container finished" podID="87467519-a8fd-447f-a841-42413dbbd3b2" containerID="9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6" exitCode=0 Oct 02 22:02:08 crc kubenswrapper[4636]: I1002 22:02:08.641300 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerDied","Data":"9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6"} Oct 02 22:02:09 crc kubenswrapper[4636]: I1002 22:02:09.659878 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerStarted","Data":"70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757"} Oct 02 22:02:09 crc kubenswrapper[4636]: I1002 22:02:09.683630 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8k8vp" podStartSLOduration=1.9904259 podStartE2EDuration="5.683615176s" podCreationTimestamp="2025-10-02 22:02:04 +0000 UTC" firstStartedPulling="2025-10-02 22:02:05.610526117 +0000 UTC m=+2316.933534136" lastFinishedPulling="2025-10-02 22:02:09.303715383 +0000 UTC m=+2320.626723412" observedRunningTime="2025-10-02 22:02:09.678022676 +0000 UTC m=+2321.001030695" watchObservedRunningTime="2025-10-02 22:02:09.683615176 +0000 UTC m=+2321.006623195" Oct 02 22:02:14 crc kubenswrapper[4636]: I1002 22:02:14.410761 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:14 crc kubenswrapper[4636]: I1002 22:02:14.412112 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:14 crc kubenswrapper[4636]: I1002 22:02:14.493330 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:14 crc kubenswrapper[4636]: I1002 22:02:14.765946 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:14 crc kubenswrapper[4636]: I1002 22:02:14.821590 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8k8vp"] Oct 02 22:02:16 crc kubenswrapper[4636]: I1002 22:02:16.715782 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8k8vp" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="registry-server" containerID="cri-o://70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757" gracePeriod=2 Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.117854 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.180781 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-utilities\") pod \"87467519-a8fd-447f-a841-42413dbbd3b2\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.180917 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcxwc\" (UniqueName: \"kubernetes.io/projected/87467519-a8fd-447f-a841-42413dbbd3b2-kube-api-access-xcxwc\") pod \"87467519-a8fd-447f-a841-42413dbbd3b2\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.181006 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-catalog-content\") pod \"87467519-a8fd-447f-a841-42413dbbd3b2\" (UID: \"87467519-a8fd-447f-a841-42413dbbd3b2\") " Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.181744 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-utilities" (OuterVolumeSpecName: "utilities") pod "87467519-a8fd-447f-a841-42413dbbd3b2" (UID: "87467519-a8fd-447f-a841-42413dbbd3b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.198583 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87467519-a8fd-447f-a841-42413dbbd3b2-kube-api-access-xcxwc" (OuterVolumeSpecName: "kube-api-access-xcxwc") pod "87467519-a8fd-447f-a841-42413dbbd3b2" (UID: "87467519-a8fd-447f-a841-42413dbbd3b2"). InnerVolumeSpecName "kube-api-access-xcxwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.233110 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87467519-a8fd-447f-a841-42413dbbd3b2" (UID: "87467519-a8fd-447f-a841-42413dbbd3b2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.283457 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcxwc\" (UniqueName: \"kubernetes.io/projected/87467519-a8fd-447f-a841-42413dbbd3b2-kube-api-access-xcxwc\") on node \"crc\" DevicePath \"\"" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.283671 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.283741 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87467519-a8fd-447f-a841-42413dbbd3b2-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.604183 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:02:17 crc kubenswrapper[4636]: E1002 22:02:17.604492 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.725580 4636 generic.go:334] "Generic (PLEG): container finished" podID="87467519-a8fd-447f-a841-42413dbbd3b2" containerID="70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757" exitCode=0 Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.725620 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerDied","Data":"70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757"} Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.725631 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8k8vp" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.725647 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8k8vp" event={"ID":"87467519-a8fd-447f-a841-42413dbbd3b2","Type":"ContainerDied","Data":"06f6138b186f64e39ae937ffa1fa49e7217f60ecef568f151d9a67841e681c45"} Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.725664 4636 scope.go:117] "RemoveContainer" containerID="70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.753197 4636 scope.go:117] "RemoveContainer" containerID="9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.758097 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8k8vp"] Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.767041 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8k8vp"] Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.801587 4636 scope.go:117] "RemoveContainer" containerID="6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.846293 4636 scope.go:117] "RemoveContainer" containerID="70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757" Oct 02 22:02:17 crc kubenswrapper[4636]: E1002 22:02:17.847157 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757\": container with ID starting with 70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757 not found: ID does not exist" containerID="70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.847198 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757"} err="failed to get container status \"70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757\": rpc error: code = NotFound desc = could not find container \"70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757\": container with ID starting with 70f94a1822fb371311ca431b269455fd56e1840d9d07797c5759c82ab59c0757 not found: ID does not exist" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.847223 4636 scope.go:117] "RemoveContainer" containerID="9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6" Oct 02 22:02:17 crc kubenswrapper[4636]: E1002 22:02:17.854889 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6\": container with ID starting with 9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6 not found: ID does not exist" containerID="9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.854935 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6"} err="failed to get container status \"9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6\": rpc error: code = NotFound desc = could not find 
container \"9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6\": container with ID starting with 9819b6216871be9371d943f6c1eb33e31532dea506d22d39511b06a39dc3f3b6 not found: ID does not exist" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.854964 4636 scope.go:117] "RemoveContainer" containerID="6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8" Oct 02 22:02:17 crc kubenswrapper[4636]: E1002 22:02:17.860122 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8\": container with ID starting with 6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8 not found: ID does not exist" containerID="6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8" Oct 02 22:02:17 crc kubenswrapper[4636]: I1002 22:02:17.860156 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8"} err="failed to get container status \"6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8\": rpc error: code = NotFound desc = could not find container \"6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8\": container with ID starting with 6794451c7a32ac325d145215a6bbbefb3f142831cf732078ec6e8561583d0bd8 not found: ID does not exist" Oct 02 22:02:19 crc kubenswrapper[4636]: I1002 22:02:19.635729 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" path="/var/lib/kubelet/pods/87467519-a8fd-447f-a841-42413dbbd3b2/volumes" Oct 02 22:02:28 crc kubenswrapper[4636]: I1002 22:02:28.604066 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:02:28 crc kubenswrapper[4636]: E1002 22:02:28.604702 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:02:42 crc kubenswrapper[4636]: I1002 22:02:42.604044 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:02:42 crc kubenswrapper[4636]: E1002 22:02:42.605144 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:02:54 crc kubenswrapper[4636]: I1002 22:02:54.603981 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:02:54 crc kubenswrapper[4636]: E1002 22:02:54.604674 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:03:06 crc kubenswrapper[4636]: I1002 22:03:06.603586 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:03:06 crc kubenswrapper[4636]: E1002 22:03:06.604237 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:03:17 crc kubenswrapper[4636]: I1002 22:03:17.604151 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:03:17 crc kubenswrapper[4636]: E1002 22:03:17.604915 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:03:32 crc kubenswrapper[4636]: I1002 22:03:32.608279 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:03:32 crc kubenswrapper[4636]: E1002 22:03:32.609349 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:03:44 crc kubenswrapper[4636]: I1002 22:03:44.604507 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:03:44 crc kubenswrapper[4636]: E1002 22:03:44.605420 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:03:59 crc kubenswrapper[4636]: I1002 22:03:59.608570 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:03:59 crc kubenswrapper[4636]: E1002 22:03:59.609382 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" 
podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:04:10 crc kubenswrapper[4636]: I1002 22:04:10.604220 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:04:10 crc kubenswrapper[4636]: E1002 22:04:10.605275 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:04:21 crc kubenswrapper[4636]: I1002 22:04:21.604693 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:04:21 crc kubenswrapper[4636]: E1002 22:04:21.605354 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:04:34 crc kubenswrapper[4636]: I1002 22:04:34.603411 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:04:34 crc kubenswrapper[4636]: E1002 22:04:34.605674 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:04:46 crc kubenswrapper[4636]: I1002 22:04:46.604672 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:04:46 crc kubenswrapper[4636]: E1002 22:04:46.606306 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:05:01 crc kubenswrapper[4636]: I1002 22:05:01.603998 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:05:01 crc kubenswrapper[4636]: E1002 22:05:01.604800 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:05:12 crc kubenswrapper[4636]: I1002 22:05:12.603800 4636 scope.go:117] "RemoveContainer" 
containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:05:12 crc kubenswrapper[4636]: E1002 22:05:12.604620 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:05:23 crc kubenswrapper[4636]: I1002 22:05:23.604802 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:05:24 crc kubenswrapper[4636]: I1002 22:05:24.556603 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"c57f702623d78c15b7af8e1c0519f45dc833960a67c40dee84b7b18cf3b91b6c"} Oct 02 22:06:01 crc kubenswrapper[4636]: I1002 22:06:01.913618 4636 generic.go:334] "Generic (PLEG): container finished" podID="3e7736a9-7b5a-4d65-ad6e-8814a0a23506" containerID="121539802c6f9d4fd712a44c9f83065886eef2cd0c4c6ecacd58daec40e6ab7d" exitCode=0 Oct 02 22:06:01 crc kubenswrapper[4636]: I1002 22:06:01.913717 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" event={"ID":"3e7736a9-7b5a-4d65-ad6e-8814a0a23506","Type":"ContainerDied","Data":"121539802c6f9d4fd712a44c9f83065886eef2cd0c4c6ecacd58daec40e6ab7d"} Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.374842 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.488568 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-secret-0\") pod \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.488668 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-inventory\") pod \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.489515 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-combined-ca-bundle\") pod \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.489545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-ssh-key\") pod \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.490041 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbwlp\" (UniqueName: \"kubernetes.io/projected/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-kube-api-access-sbwlp\") pod \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\" (UID: \"3e7736a9-7b5a-4d65-ad6e-8814a0a23506\") " Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.497840 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3e7736a9-7b5a-4d65-ad6e-8814a0a23506" (UID: "3e7736a9-7b5a-4d65-ad6e-8814a0a23506"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.503808 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-kube-api-access-sbwlp" (OuterVolumeSpecName: "kube-api-access-sbwlp") pod "3e7736a9-7b5a-4d65-ad6e-8814a0a23506" (UID: "3e7736a9-7b5a-4d65-ad6e-8814a0a23506"). InnerVolumeSpecName "kube-api-access-sbwlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.521624 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3e7736a9-7b5a-4d65-ad6e-8814a0a23506" (UID: "3e7736a9-7b5a-4d65-ad6e-8814a0a23506"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.527491 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "3e7736a9-7b5a-4d65-ad6e-8814a0a23506" (UID: "3e7736a9-7b5a-4d65-ad6e-8814a0a23506"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.543070 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-inventory" (OuterVolumeSpecName: "inventory") pod "3e7736a9-7b5a-4d65-ad6e-8814a0a23506" (UID: "3e7736a9-7b5a-4d65-ad6e-8814a0a23506"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.592407 4636 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.592442 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.592453 4636 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.592463 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.592472 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbwlp\" (UniqueName: \"kubernetes.io/projected/3e7736a9-7b5a-4d65-ad6e-8814a0a23506-kube-api-access-sbwlp\") on node \"crc\" DevicePath \"\"" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.936540 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" event={"ID":"3e7736a9-7b5a-4d65-ad6e-8814a0a23506","Type":"ContainerDied","Data":"8b78f08b17de5f775d2959ff5d4e9f2ada6baca3a6dfb73c71670f4ad937f580"} Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.936846 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b78f08b17de5f775d2959ff5d4e9f2ada6baca3a6dfb73c71670f4ad937f580" Oct 02 22:06:03 crc kubenswrapper[4636]: I1002 22:06:03.936589 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094177 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67"] Oct 02 22:06:04 crc kubenswrapper[4636]: E1002 22:06:04.094593 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="extract-utilities" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094611 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="extract-utilities" Oct 02 22:06:04 crc kubenswrapper[4636]: E1002 22:06:04.094623 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="registry-server" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094630 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="registry-server" Oct 02 22:06:04 crc kubenswrapper[4636]: E1002 22:06:04.094648 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e7736a9-7b5a-4d65-ad6e-8814a0a23506" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094654 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e7736a9-7b5a-4d65-ad6e-8814a0a23506" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 02 22:06:04 crc kubenswrapper[4636]: E1002 22:06:04.094664 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="extract-content" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094669 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="extract-content" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094843 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="87467519-a8fd-447f-a841-42413dbbd3b2" containerName="registry-server" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.094870 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e7736a9-7b5a-4d65-ad6e-8814a0a23506" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.095427 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.098151 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.101049 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.101230 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.101372 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.101480 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.101576 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.101620 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.112293 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67"] Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203375 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203428 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203455 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203624 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203857 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203941 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.203968 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.204019 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvtbm\" (UniqueName: \"kubernetes.io/projected/d5f9ecc8-8087-42ef-97bb-b083a26f8272-kube-api-access-cvtbm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.204054 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.305842 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.305905 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.305952 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.305992 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.306061 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.306095 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.306117 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.306144 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvtbm\" (UniqueName: \"kubernetes.io/projected/d5f9ecc8-8087-42ef-97bb-b083a26f8272-kube-api-access-cvtbm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.306167 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.307826 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.310121 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.310528 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-1\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.311082 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.311111 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.311581 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.311621 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.312077 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.331586 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvtbm\" (UniqueName: \"kubernetes.io/projected/d5f9ecc8-8087-42ef-97bb-b083a26f8272-kube-api-access-cvtbm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-mwm67\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.413987 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:06:04 crc kubenswrapper[4636]: I1002 22:06:04.942272 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67"] Oct 02 22:06:04 crc kubenswrapper[4636]: W1002 22:06:04.945678 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5f9ecc8_8087_42ef_97bb_b083a26f8272.slice/crio-f1584ba88bf3459c52da48263992af8d852b99dfe250952d94ae7e4ac47e9962 WatchSource:0}: Error finding container f1584ba88bf3459c52da48263992af8d852b99dfe250952d94ae7e4ac47e9962: Status 404 returned error can't find the container with id f1584ba88bf3459c52da48263992af8d852b99dfe250952d94ae7e4ac47e9962 Oct 02 22:06:05 crc kubenswrapper[4636]: I1002 22:06:05.955607 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" event={"ID":"d5f9ecc8-8087-42ef-97bb-b083a26f8272","Type":"ContainerStarted","Data":"3a31c9001fa7c04bf6596b1ea86c736ca987cca0426d6d01f3abd5570cfca569"} Oct 02 22:06:05 crc kubenswrapper[4636]: I1002 22:06:05.956008 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" event={"ID":"d5f9ecc8-8087-42ef-97bb-b083a26f8272","Type":"ContainerStarted","Data":"f1584ba88bf3459c52da48263992af8d852b99dfe250952d94ae7e4ac47e9962"} Oct 02 22:06:05 crc kubenswrapper[4636]: I1002 22:06:05.977860 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" podStartSLOduration=1.766054128 podStartE2EDuration="1.977843592s" podCreationTimestamp="2025-10-02 22:06:04 +0000 UTC" firstStartedPulling="2025-10-02 22:06:04.948313162 +0000 UTC m=+2556.271321201" lastFinishedPulling="2025-10-02 22:06:05.160102646 +0000 UTC m=+2556.483110665" observedRunningTime="2025-10-02 22:06:05.97288994 +0000 UTC m=+2557.295897959" watchObservedRunningTime="2025-10-02 22:06:05.977843592 +0000 UTC m=+2557.300851611" Oct 02 22:07:23 crc kubenswrapper[4636]: I1002 22:07:23.117230 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:07:23 crc kubenswrapper[4636]: I1002 22:07:23.118013 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:07:53 crc kubenswrapper[4636]: I1002 22:07:53.117242 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:07:53 crc kubenswrapper[4636]: I1002 22:07:53.117955 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.117361 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.118042 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.118103 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.119170 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c57f702623d78c15b7af8e1c0519f45dc833960a67c40dee84b7b18cf3b91b6c"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.119235 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://c57f702623d78c15b7af8e1c0519f45dc833960a67c40dee84b7b18cf3b91b6c" gracePeriod=600 Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.304966 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="c57f702623d78c15b7af8e1c0519f45dc833960a67c40dee84b7b18cf3b91b6c" exitCode=0 Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.305030 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"c57f702623d78c15b7af8e1c0519f45dc833960a67c40dee84b7b18cf3b91b6c"} Oct 02 22:08:23 crc kubenswrapper[4636]: I1002 22:08:23.305105 4636 scope.go:117] "RemoveContainer" containerID="40bd9ef481fcfaf74e5c9faae43ca02896abd25c08c67081e2e7dd829da55b35" Oct 02 22:08:24 crc kubenswrapper[4636]: I1002 22:08:24.323959 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5"} Oct 02 22:09:53 crc kubenswrapper[4636]: I1002 22:09:53.144902 4636 generic.go:334] "Generic (PLEG): container finished" podID="d5f9ecc8-8087-42ef-97bb-b083a26f8272" containerID="3a31c9001fa7c04bf6596b1ea86c736ca987cca0426d6d01f3abd5570cfca569" exitCode=0 Oct 02 22:09:53 crc kubenswrapper[4636]: I1002 22:09:53.144974 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" 
event={"ID":"d5f9ecc8-8087-42ef-97bb-b083a26f8272","Type":"ContainerDied","Data":"3a31c9001fa7c04bf6596b1ea86c736ca987cca0426d6d01f3abd5570cfca569"} Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.579838 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664460 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvtbm\" (UniqueName: \"kubernetes.io/projected/d5f9ecc8-8087-42ef-97bb-b083a26f8272-kube-api-access-cvtbm\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664535 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-ssh-key\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664555 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-extra-config-0\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664576 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-0\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664625 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-1\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664652 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-1\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664677 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-combined-ca-bundle\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664698 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-inventory\") pod \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.664739 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-0\") pod 
\"d5f9ecc8-8087-42ef-97bb-b083a26f8272\" (UID: \"d5f9ecc8-8087-42ef-97bb-b083a26f8272\") " Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.669860 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5f9ecc8-8087-42ef-97bb-b083a26f8272-kube-api-access-cvtbm" (OuterVolumeSpecName: "kube-api-access-cvtbm") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "kube-api-access-cvtbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.671211 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.695869 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.698956 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.707810 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.710890 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.718295 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.720972 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-inventory" (OuterVolumeSpecName: "inventory") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.753018 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "d5f9ecc8-8087-42ef-97bb-b083a26f8272" (UID: "d5f9ecc8-8087-42ef-97bb-b083a26f8272"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766024 4636 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766054 4636 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766063 4636 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766073 4636 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766082 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766090 4636 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766099 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvtbm\" (UniqueName: \"kubernetes.io/projected/d5f9ecc8-8087-42ef-97bb-b083a26f8272-kube-api-access-cvtbm\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766107 4636 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d5f9ecc8-8087-42ef-97bb-b083a26f8272-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:54 crc kubenswrapper[4636]: I1002 22:09:54.766115 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5f9ecc8-8087-42ef-97bb-b083a26f8272-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.161692 4636 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" event={"ID":"d5f9ecc8-8087-42ef-97bb-b083a26f8272","Type":"ContainerDied","Data":"f1584ba88bf3459c52da48263992af8d852b99dfe250952d94ae7e4ac47e9962"} Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.161970 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1584ba88bf3459c52da48263992af8d852b99dfe250952d94ae7e4ac47e9962" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.161851 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-mwm67" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.311677 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb"] Oct 02 22:09:55 crc kubenswrapper[4636]: E1002 22:09:55.312060 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5f9ecc8-8087-42ef-97bb-b083a26f8272" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.312076 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5f9ecc8-8087-42ef-97bb-b083a26f8272" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.312295 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5f9ecc8-8087-42ef-97bb-b083a26f8272" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.312949 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.314902 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.315070 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xcwg2" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.315310 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.315514 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.315637 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.359389 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb"] Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.375920 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.376024 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbmc6\" (UniqueName: 
\"kubernetes.io/projected/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-kube-api-access-pbmc6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.376071 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.376119 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.376204 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.376251 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.376290 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477332 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477429 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477473 4636 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477505 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477521 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbmc6\" (UniqueName: \"kubernetes.io/projected/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-kube-api-access-pbmc6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477609 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.477652 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.481458 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.481511 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.482460 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.483028 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.486379 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.493831 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.496367 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbmc6\" (UniqueName: \"kubernetes.io/projected/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-kube-api-access-pbmc6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:55 crc kubenswrapper[4636]: I1002 22:09:55.627789 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:09:56 crc kubenswrapper[4636]: I1002 22:09:56.133681 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb"] Oct 02 22:09:56 crc kubenswrapper[4636]: I1002 22:09:56.138128 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:09:56 crc kubenswrapper[4636]: I1002 22:09:56.175675 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" event={"ID":"cf879426-52a4-4ab6-9271-6b9e0c74ebfb","Type":"ContainerStarted","Data":"6478f3ba8c8674c0f66ce12d0fb37081116a8d84fab5f2d3a2227d5a01eb27ed"} Oct 02 22:09:57 crc kubenswrapper[4636]: I1002 22:09:57.185825 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" event={"ID":"cf879426-52a4-4ab6-9271-6b9e0c74ebfb","Type":"ContainerStarted","Data":"1de6a0dbe1bb88a46693c4fb6b7b90702c629040e282443067a4644479983a9e"} Oct 02 22:09:57 crc kubenswrapper[4636]: I1002 22:09:57.222855 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" podStartSLOduration=2.058246776 podStartE2EDuration="2.222835997s" podCreationTimestamp="2025-10-02 22:09:55 +0000 UTC" firstStartedPulling="2025-10-02 22:09:56.137922385 +0000 UTC m=+2787.460930404" lastFinishedPulling="2025-10-02 22:09:56.302511606 +0000 UTC m=+2787.625519625" observedRunningTime="2025-10-02 22:09:57.215886271 +0000 UTC m=+2788.538894330" watchObservedRunningTime="2025-10-02 22:09:57.222835997 +0000 UTC m=+2788.545844036" Oct 02 22:10:23 crc kubenswrapper[4636]: I1002 22:10:23.117943 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:10:23 crc kubenswrapper[4636]: I1002 22:10:23.118532 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.353415 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gtzqk"] Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.355484 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.366598 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtzqk"] Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.541435 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlqzr\" (UniqueName: \"kubernetes.io/projected/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-kube-api-access-qlqzr\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.541820 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-catalog-content\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.541978 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-utilities\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.643921 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlqzr\" (UniqueName: \"kubernetes.io/projected/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-kube-api-access-qlqzr\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.644042 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-catalog-content\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.644531 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-catalog-content\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.644571 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-utilities\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.644578 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-utilities\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.662508 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qlqzr\" (UniqueName: \"kubernetes.io/projected/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-kube-api-access-qlqzr\") pod \"redhat-operators-gtzqk\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:24 crc kubenswrapper[4636]: I1002 22:10:24.698488 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:25 crc kubenswrapper[4636]: I1002 22:10:25.202461 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtzqk"] Oct 02 22:10:25 crc kubenswrapper[4636]: I1002 22:10:25.473244 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerStarted","Data":"34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437"} Oct 02 22:10:25 crc kubenswrapper[4636]: I1002 22:10:25.473470 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerStarted","Data":"0152d18ee4dc77cb898994d01f85aab5fabc6a8d55380acaf90db4f1aaca90f6"} Oct 02 22:10:26 crc kubenswrapper[4636]: I1002 22:10:26.483672 4636 generic.go:334] "Generic (PLEG): container finished" podID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerID="34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437" exitCode=0 Oct 02 22:10:26 crc kubenswrapper[4636]: I1002 22:10:26.483932 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerDied","Data":"34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437"} Oct 02 22:10:27 crc kubenswrapper[4636]: I1002 22:10:27.496133 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerStarted","Data":"f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b"} Oct 02 22:10:30 crc kubenswrapper[4636]: I1002 22:10:30.521339 4636 generic.go:334] "Generic (PLEG): container finished" podID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerID="f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b" exitCode=0 Oct 02 22:10:30 crc kubenswrapper[4636]: I1002 22:10:30.521441 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerDied","Data":"f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b"} Oct 02 22:10:31 crc kubenswrapper[4636]: I1002 22:10:31.547726 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerStarted","Data":"b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439"} Oct 02 22:10:31 crc kubenswrapper[4636]: I1002 22:10:31.569455 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gtzqk" podStartSLOduration=2.11189357 podStartE2EDuration="7.569437531s" podCreationTimestamp="2025-10-02 22:10:24 +0000 UTC" firstStartedPulling="2025-10-02 22:10:25.475104836 +0000 UTC m=+2816.798112855" lastFinishedPulling="2025-10-02 22:10:30.932648797 +0000 UTC m=+2822.255656816" observedRunningTime="2025-10-02 
22:10:31.563655136 +0000 UTC m=+2822.886663165" watchObservedRunningTime="2025-10-02 22:10:31.569437531 +0000 UTC m=+2822.892445550" Oct 02 22:10:34 crc kubenswrapper[4636]: I1002 22:10:34.699677 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:34 crc kubenswrapper[4636]: I1002 22:10:34.700031 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:35 crc kubenswrapper[4636]: I1002 22:10:35.754716 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gtzqk" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="registry-server" probeResult="failure" output=< Oct 02 22:10:35 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 22:10:35 crc kubenswrapper[4636]: > Oct 02 22:10:44 crc kubenswrapper[4636]: I1002 22:10:44.773082 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:44 crc kubenswrapper[4636]: I1002 22:10:44.859314 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:45 crc kubenswrapper[4636]: I1002 22:10:45.009531 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtzqk"] Oct 02 22:10:46 crc kubenswrapper[4636]: I1002 22:10:46.681285 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gtzqk" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="registry-server" containerID="cri-o://b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439" gracePeriod=2 Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.126352 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.171874 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-utilities\") pod \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.172000 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-catalog-content\") pod \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.172301 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlqzr\" (UniqueName: \"kubernetes.io/projected/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-kube-api-access-qlqzr\") pod \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\" (UID: \"227ca7a7-8851-4f84-bca8-bccdc6df4b5d\") " Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.172779 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-utilities" (OuterVolumeSpecName: "utilities") pod "227ca7a7-8851-4f84-bca8-bccdc6df4b5d" (UID: "227ca7a7-8851-4f84-bca8-bccdc6df4b5d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.173644 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.184008 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-kube-api-access-qlqzr" (OuterVolumeSpecName: "kube-api-access-qlqzr") pod "227ca7a7-8851-4f84-bca8-bccdc6df4b5d" (UID: "227ca7a7-8851-4f84-bca8-bccdc6df4b5d"). InnerVolumeSpecName "kube-api-access-qlqzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.258708 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "227ca7a7-8851-4f84-bca8-bccdc6df4b5d" (UID: "227ca7a7-8851-4f84-bca8-bccdc6df4b5d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.275088 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.275137 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlqzr\" (UniqueName: \"kubernetes.io/projected/227ca7a7-8851-4f84-bca8-bccdc6df4b5d-kube-api-access-qlqzr\") on node \"crc\" DevicePath \"\"" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.696587 4636 generic.go:334] "Generic (PLEG): container finished" podID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerID="b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439" exitCode=0 Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.696640 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerDied","Data":"b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439"} Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.696667 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gtzqk" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.697518 4636 scope.go:117] "RemoveContainer" containerID="b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.697429 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtzqk" event={"ID":"227ca7a7-8851-4f84-bca8-bccdc6df4b5d","Type":"ContainerDied","Data":"0152d18ee4dc77cb898994d01f85aab5fabc6a8d55380acaf90db4f1aaca90f6"} Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.740817 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtzqk"] Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.741850 4636 scope.go:117] "RemoveContainer" containerID="f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.746926 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gtzqk"] Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.773386 4636 scope.go:117] "RemoveContainer" containerID="34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.817575 4636 scope.go:117] "RemoveContainer" containerID="b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439" Oct 02 22:10:47 crc kubenswrapper[4636]: E1002 22:10:47.818212 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439\": container with ID starting with b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439 not found: ID does not exist" containerID="b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.818271 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439"} err="failed to get container status \"b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439\": rpc error: code = NotFound desc = could not find container \"b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439\": container with ID starting with b34192d360eb587e1c8d46d53d0ca3dd5a21774e9f56d5d2a6ab1b85f4a93439 not found: ID does not exist" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.818302 4636 scope.go:117] "RemoveContainer" containerID="f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b" Oct 02 22:10:47 crc kubenswrapper[4636]: E1002 22:10:47.818610 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b\": container with ID starting with f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b not found: ID does not exist" containerID="f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.818642 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b"} err="failed to get container status \"f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b\": rpc error: code = NotFound desc = could not find container 
\"f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b\": container with ID starting with f5e042c6c1f7291b8171792bc5c67cfc69730a56dd3e6efd47729d8462f9db2b not found: ID does not exist" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.818662 4636 scope.go:117] "RemoveContainer" containerID="34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437" Oct 02 22:10:47 crc kubenswrapper[4636]: E1002 22:10:47.818990 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437\": container with ID starting with 34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437 not found: ID does not exist" containerID="34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437" Oct 02 22:10:47 crc kubenswrapper[4636]: I1002 22:10:47.819010 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437"} err="failed to get container status \"34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437\": rpc error: code = NotFound desc = could not find container \"34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437\": container with ID starting with 34cc933b096c733aa57287f56c1836b0ad1c2b37e555f3db572f853ac503f437 not found: ID does not exist" Oct 02 22:10:49 crc kubenswrapper[4636]: I1002 22:10:49.613569 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" path="/var/lib/kubelet/pods/227ca7a7-8851-4f84-bca8-bccdc6df4b5d/volumes" Oct 02 22:10:53 crc kubenswrapper[4636]: I1002 22:10:53.117274 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:10:53 crc kubenswrapper[4636]: I1002 22:10:53.117915 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:11:23 crc kubenswrapper[4636]: I1002 22:11:23.117351 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:11:23 crc kubenswrapper[4636]: I1002 22:11:23.117892 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:11:23 crc kubenswrapper[4636]: I1002 22:11:23.117938 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:11:23 crc kubenswrapper[4636]: I1002 22:11:23.118694 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:11:23 crc kubenswrapper[4636]: I1002 22:11:23.118771 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" gracePeriod=600 Oct 02 22:11:23 crc kubenswrapper[4636]: E1002 22:11:23.236994 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:11:24 crc kubenswrapper[4636]: I1002 22:11:24.039298 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" exitCode=0 Oct 02 22:11:24 crc kubenswrapper[4636]: I1002 22:11:24.039353 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5"} Oct 02 22:11:24 crc kubenswrapper[4636]: I1002 22:11:24.039394 4636 scope.go:117] "RemoveContainer" containerID="c57f702623d78c15b7af8e1c0519f45dc833960a67c40dee84b7b18cf3b91b6c" Oct 02 22:11:24 crc kubenswrapper[4636]: I1002 22:11:24.040181 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:11:24 crc kubenswrapper[4636]: E1002 22:11:24.040516 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:11:35 crc kubenswrapper[4636]: I1002 22:11:35.603852 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:11:35 crc kubenswrapper[4636]: E1002 22:11:35.604702 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:11:46 crc kubenswrapper[4636]: I1002 22:11:46.603650 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:11:46 crc kubenswrapper[4636]: E1002 22:11:46.604596 4636 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:11:59 crc kubenswrapper[4636]: I1002 22:11:59.612196 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:11:59 crc kubenswrapper[4636]: E1002 22:11:59.613308 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:12:11 crc kubenswrapper[4636]: I1002 22:12:11.604167 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:12:11 crc kubenswrapper[4636]: E1002 22:12:11.604808 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:12:25 crc kubenswrapper[4636]: I1002 22:12:25.605966 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:12:25 crc kubenswrapper[4636]: E1002 22:12:25.607210 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:12:37 crc kubenswrapper[4636]: I1002 22:12:37.604036 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:12:37 crc kubenswrapper[4636]: E1002 22:12:37.606297 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:12:49 crc kubenswrapper[4636]: I1002 22:12:49.614321 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:12:49 crc kubenswrapper[4636]: E1002 22:12:49.615077 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:13:00 crc kubenswrapper[4636]: I1002 22:13:00.605086 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:13:00 crc kubenswrapper[4636]: E1002 22:13:00.606060 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:13:14 crc kubenswrapper[4636]: I1002 22:13:14.604293 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:13:14 crc kubenswrapper[4636]: E1002 22:13:14.605454 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.133614 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4g494"] Oct 02 22:13:28 crc kubenswrapper[4636]: E1002 22:13:28.134490 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="registry-server" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.134503 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="registry-server" Oct 02 22:13:28 crc kubenswrapper[4636]: E1002 22:13:28.134513 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="extract-utilities" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.134520 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="extract-utilities" Oct 02 22:13:28 crc kubenswrapper[4636]: E1002 22:13:28.134537 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="extract-content" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.134543 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="extract-content" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.134716 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="227ca7a7-8851-4f84-bca8-bccdc6df4b5d" containerName="registry-server" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.136024 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.155420 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4g494"] Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.295688 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-utilities\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.295821 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-catalog-content\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.295843 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf6fc\" (UniqueName: \"kubernetes.io/projected/b5deeca0-fe3b-49f8-8e57-313270678339-kube-api-access-kf6fc\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.400466 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-utilities\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.400687 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-catalog-content\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.400717 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf6fc\" (UniqueName: \"kubernetes.io/projected/b5deeca0-fe3b-49f8-8e57-313270678339-kube-api-access-kf6fc\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.400980 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-utilities\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.401012 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-catalog-content\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.420496 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kf6fc\" (UniqueName: \"kubernetes.io/projected/b5deeca0-fe3b-49f8-8e57-313270678339-kube-api-access-kf6fc\") pod \"certified-operators-4g494\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.463315 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.604706 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:13:28 crc kubenswrapper[4636]: E1002 22:13:28.605172 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:13:28 crc kubenswrapper[4636]: I1002 22:13:28.816039 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4g494"] Oct 02 22:13:29 crc kubenswrapper[4636]: I1002 22:13:29.263932 4636 generic.go:334] "Generic (PLEG): container finished" podID="b5deeca0-fe3b-49f8-8e57-313270678339" containerID="eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3" exitCode=0 Oct 02 22:13:29 crc kubenswrapper[4636]: I1002 22:13:29.263972 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerDied","Data":"eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3"} Oct 02 22:13:29 crc kubenswrapper[4636]: I1002 22:13:29.263997 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerStarted","Data":"7ee0353d16c295cd7e8e3d2cf4ca2c2a60bc6bf8f22c8dfb960a6c6e5151e0d0"} Oct 02 22:13:30 crc kubenswrapper[4636]: I1002 22:13:30.273818 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerStarted","Data":"6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb"} Oct 02 22:13:31 crc kubenswrapper[4636]: I1002 22:13:31.282014 4636 generic.go:334] "Generic (PLEG): container finished" podID="b5deeca0-fe3b-49f8-8e57-313270678339" containerID="6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb" exitCode=0 Oct 02 22:13:31 crc kubenswrapper[4636]: I1002 22:13:31.282050 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerDied","Data":"6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb"} Oct 02 22:13:32 crc kubenswrapper[4636]: I1002 22:13:32.294130 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerStarted","Data":"275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01"} Oct 02 22:13:32 crc kubenswrapper[4636]: I1002 22:13:32.318011 4636 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4g494" podStartSLOduration=1.8128163590000002 podStartE2EDuration="4.317990874s" podCreationTimestamp="2025-10-02 22:13:28 +0000 UTC" firstStartedPulling="2025-10-02 22:13:29.266002506 +0000 UTC m=+3000.589010525" lastFinishedPulling="2025-10-02 22:13:31.771177021 +0000 UTC m=+3003.094185040" observedRunningTime="2025-10-02 22:13:32.309966799 +0000 UTC m=+3003.632974828" watchObservedRunningTime="2025-10-02 22:13:32.317990874 +0000 UTC m=+3003.640998903" Oct 02 22:13:38 crc kubenswrapper[4636]: I1002 22:13:38.464217 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:38 crc kubenswrapper[4636]: I1002 22:13:38.466689 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:38 crc kubenswrapper[4636]: I1002 22:13:38.520891 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:39 crc kubenswrapper[4636]: I1002 22:13:39.406121 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:39 crc kubenswrapper[4636]: I1002 22:13:39.460503 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4g494"] Oct 02 22:13:40 crc kubenswrapper[4636]: I1002 22:13:40.368687 4636 generic.go:334] "Generic (PLEG): container finished" podID="cf879426-52a4-4ab6-9271-6b9e0c74ebfb" containerID="1de6a0dbe1bb88a46693c4fb6b7b90702c629040e282443067a4644479983a9e" exitCode=0 Oct 02 22:13:40 crc kubenswrapper[4636]: I1002 22:13:40.369623 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" event={"ID":"cf879426-52a4-4ab6-9271-6b9e0c74ebfb","Type":"ContainerDied","Data":"1de6a0dbe1bb88a46693c4fb6b7b90702c629040e282443067a4644479983a9e"} Oct 02 22:13:40 crc kubenswrapper[4636]: I1002 22:13:40.613919 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:13:40 crc kubenswrapper[4636]: E1002 22:13:40.617136 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:13:41 crc kubenswrapper[4636]: I1002 22:13:41.376970 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4g494" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="registry-server" containerID="cri-o://275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01" gracePeriod=2 Oct 02 22:13:41 crc kubenswrapper[4636]: I1002 22:13:41.960401 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.058884 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.075197 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf6fc\" (UniqueName: \"kubernetes.io/projected/b5deeca0-fe3b-49f8-8e57-313270678339-kube-api-access-kf6fc\") pod \"b5deeca0-fe3b-49f8-8e57-313270678339\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.075374 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-utilities\") pod \"b5deeca0-fe3b-49f8-8e57-313270678339\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.075449 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-catalog-content\") pod \"b5deeca0-fe3b-49f8-8e57-313270678339\" (UID: \"b5deeca0-fe3b-49f8-8e57-313270678339\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.084233 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-utilities" (OuterVolumeSpecName: "utilities") pod "b5deeca0-fe3b-49f8-8e57-313270678339" (UID: "b5deeca0-fe3b-49f8-8e57-313270678339"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.088016 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5deeca0-fe3b-49f8-8e57-313270678339-kube-api-access-kf6fc" (OuterVolumeSpecName: "kube-api-access-kf6fc") pod "b5deeca0-fe3b-49f8-8e57-313270678339" (UID: "b5deeca0-fe3b-49f8-8e57-313270678339"). InnerVolumeSpecName "kube-api-access-kf6fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.121792 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5deeca0-fe3b-49f8-8e57-313270678339" (UID: "b5deeca0-fe3b-49f8-8e57-313270678339"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177419 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ssh-key\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177545 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-inventory\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177610 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbmc6\" (UniqueName: \"kubernetes.io/projected/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-kube-api-access-pbmc6\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177658 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-telemetry-combined-ca-bundle\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177742 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-1\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177822 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-0\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.177846 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-2\") pod \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\" (UID: \"cf879426-52a4-4ab6-9271-6b9e0c74ebfb\") " Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.178224 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf6fc\" (UniqueName: \"kubernetes.io/projected/b5deeca0-fe3b-49f8-8e57-313270678339-kube-api-access-kf6fc\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.178234 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.178242 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5deeca0-fe3b-49f8-8e57-313270678339-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.185038 4636 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-kube-api-access-pbmc6" (OuterVolumeSpecName: "kube-api-access-pbmc6") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "kube-api-access-pbmc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.195157 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.203303 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.203685 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.206429 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.206660 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-inventory" (OuterVolumeSpecName: "inventory") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.210226 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "cf879426-52a4-4ab6-9271-6b9e0c74ebfb" (UID: "cf879426-52a4-4ab6-9271-6b9e0c74ebfb"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280737 4636 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280789 4636 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280799 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280809 4636 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-inventory\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280821 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbmc6\" (UniqueName: \"kubernetes.io/projected/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-kube-api-access-pbmc6\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280830 4636 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.280839 4636 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cf879426-52a4-4ab6-9271-6b9e0c74ebfb-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.391471 4636 generic.go:334] "Generic (PLEG): container finished" podID="b5deeca0-fe3b-49f8-8e57-313270678339" containerID="275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01" exitCode=0 Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.391529 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerDied","Data":"275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01"} Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.391556 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4g494" event={"ID":"b5deeca0-fe3b-49f8-8e57-313270678339","Type":"ContainerDied","Data":"7ee0353d16c295cd7e8e3d2cf4ca2c2a60bc6bf8f22c8dfb960a6c6e5151e0d0"} Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.391572 4636 scope.go:117] "RemoveContainer" containerID="275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.391691 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4g494" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.406173 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" event={"ID":"cf879426-52a4-4ab6-9271-6b9e0c74ebfb","Type":"ContainerDied","Data":"6478f3ba8c8674c0f66ce12d0fb37081116a8d84fab5f2d3a2227d5a01eb27ed"} Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.406219 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6478f3ba8c8674c0f66ce12d0fb37081116a8d84fab5f2d3a2227d5a01eb27ed" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.406220 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.429948 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4g494"] Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.438677 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4g494"] Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.442314 4636 scope.go:117] "RemoveContainer" containerID="6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.460254 4636 scope.go:117] "RemoveContainer" containerID="eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.490002 4636 scope.go:117] "RemoveContainer" containerID="275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01" Oct 02 22:13:42 crc kubenswrapper[4636]: E1002 22:13:42.490494 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01\": container with ID starting with 275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01 not found: ID does not exist" containerID="275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.490522 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01"} err="failed to get container status \"275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01\": rpc error: code = NotFound desc = could not find container \"275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01\": container with ID starting with 275e8d1fd9c950961a5ca92bf4a50fe0feeaef07f5aa982ba05115e17a45eb01 not found: ID does not exist" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.490543 4636 scope.go:117] "RemoveContainer" containerID="6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb" Oct 02 22:13:42 crc kubenswrapper[4636]: E1002 22:13:42.490760 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb\": container with ID starting with 6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb not found: ID does not exist" containerID="6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.490777 4636 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb"} err="failed to get container status \"6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb\": rpc error: code = NotFound desc = could not find container \"6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb\": container with ID starting with 6c95b2e26bfa67747a4388588f6d57050de7c29fe001758d924d1f51c7e83deb not found: ID does not exist" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.490789 4636 scope.go:117] "RemoveContainer" containerID="eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3" Oct 02 22:13:42 crc kubenswrapper[4636]: E1002 22:13:42.491007 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3\": container with ID starting with eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3 not found: ID does not exist" containerID="eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3" Oct 02 22:13:42 crc kubenswrapper[4636]: I1002 22:13:42.491030 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3"} err="failed to get container status \"eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3\": rpc error: code = NotFound desc = could not find container \"eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3\": container with ID starting with eb2f7b36d252df22c4f268398bd16f9cb7b402f316a2df7be3b918ae1bdc84c3 not found: ID does not exist" Oct 02 22:13:43 crc kubenswrapper[4636]: I1002 22:13:43.614799 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" path="/var/lib/kubelet/pods/b5deeca0-fe3b-49f8-8e57-313270678339/volumes" Oct 02 22:13:54 crc kubenswrapper[4636]: I1002 22:13:54.604245 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:13:54 crc kubenswrapper[4636]: E1002 22:13:54.605635 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:14:09 crc kubenswrapper[4636]: I1002 22:14:09.610124 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:14:09 crc kubenswrapper[4636]: E1002 22:14:09.610944 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:14:24 crc kubenswrapper[4636]: I1002 22:14:24.604636 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:14:24 crc kubenswrapper[4636]: E1002 22:14:24.606268 4636 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:14:38 crc kubenswrapper[4636]: I1002 22:14:38.603720 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:14:38 crc kubenswrapper[4636]: E1002 22:14:38.604583 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.434736 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 02 22:14:43 crc kubenswrapper[4636]: E1002 22:14:43.436548 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf879426-52a4-4ab6-9271-6b9e0c74ebfb" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.436615 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf879426-52a4-4ab6-9271-6b9e0c74ebfb" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 02 22:14:43 crc kubenswrapper[4636]: E1002 22:14:43.436652 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="extract-utilities" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.436699 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="extract-utilities" Oct 02 22:14:43 crc kubenswrapper[4636]: E1002 22:14:43.436795 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="registry-server" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.436816 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="registry-server" Oct 02 22:14:43 crc kubenswrapper[4636]: E1002 22:14:43.436844 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="extract-content" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.436856 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="extract-content" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.437188 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf879426-52a4-4ab6-9271-6b9e0c74ebfb" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.437222 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5deeca0-fe3b-49f8-8e57-313270678339" containerName="registry-server" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.438175 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.441343 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.441832 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-n75qw" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.442939 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.443922 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.464788 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.565390 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dtxc\" (UniqueName: \"kubernetes.io/projected/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-kube-api-access-8dtxc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.565645 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.565741 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.565849 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.565946 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.566058 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.566180 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.566256 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.566338 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-config-data\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.668783 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.668964 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669050 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669094 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-config-data\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669195 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dtxc\" (UniqueName: \"kubernetes.io/projected/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-kube-api-access-8dtxc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669262 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669305 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config\") pod \"tempest-tests-tempest\" (UID: 
\"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669342 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.669423 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.670725 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.670876 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.671704 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.671967 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.672224 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-config-data\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.678630 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.684914 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.685004 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.699649 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dtxc\" (UniqueName: \"kubernetes.io/projected/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-kube-api-access-8dtxc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.707510 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " pod="openstack/tempest-tests-tempest" Oct 02 22:14:43 crc kubenswrapper[4636]: I1002 22:14:43.770838 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 02 22:14:44 crc kubenswrapper[4636]: I1002 22:14:44.204875 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 02 22:14:44 crc kubenswrapper[4636]: W1002 22:14:44.207043 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod388bd5ff_d88f_4ea6_83e0_0ae99fc188ab.slice/crio-79e227e50ea8f0855a8ab93712bb83d830e94c31176fb510fe2927af86bd12f7 WatchSource:0}: Error finding container 79e227e50ea8f0855a8ab93712bb83d830e94c31176fb510fe2927af86bd12f7: Status 404 returned error can't find the container with id 79e227e50ea8f0855a8ab93712bb83d830e94c31176fb510fe2927af86bd12f7 Oct 02 22:14:45 crc kubenswrapper[4636]: I1002 22:14:45.030236 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab","Type":"ContainerStarted","Data":"79e227e50ea8f0855a8ab93712bb83d830e94c31176fb510fe2927af86bd12f7"} Oct 02 22:14:49 crc kubenswrapper[4636]: I1002 22:14:49.611177 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:14:49 crc kubenswrapper[4636]: E1002 22:14:49.612060 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.158480 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc"] Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.160532 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.163521 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.165589 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.171772 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc"] Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.340733 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6jxk\" (UniqueName: \"kubernetes.io/projected/d42ca824-f82c-40c1-b997-bcb3008c6c37-kube-api-access-c6jxk\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.341171 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d42ca824-f82c-40c1-b997-bcb3008c6c37-secret-volume\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.341267 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d42ca824-f82c-40c1-b997-bcb3008c6c37-config-volume\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.443534 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d42ca824-f82c-40c1-b997-bcb3008c6c37-secret-volume\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.443652 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d42ca824-f82c-40c1-b997-bcb3008c6c37-config-volume\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.443802 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6jxk\" (UniqueName: \"kubernetes.io/projected/d42ca824-f82c-40c1-b997-bcb3008c6c37-kube-api-access-c6jxk\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.444985 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d42ca824-f82c-40c1-b997-bcb3008c6c37-config-volume\") pod 
\"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.462076 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6jxk\" (UniqueName: \"kubernetes.io/projected/d42ca824-f82c-40c1-b997-bcb3008c6c37-kube-api-access-c6jxk\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.466525 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d42ca824-f82c-40c1-b997-bcb3008c6c37-secret-volume\") pod \"collect-profiles-29324055-fl4gc\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:00 crc kubenswrapper[4636]: I1002 22:15:00.483980 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:02 crc kubenswrapper[4636]: I1002 22:15:02.603837 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:15:02 crc kubenswrapper[4636]: E1002 22:15:02.604947 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.638022 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hq78w"] Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.643546 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.660115 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq78w"] Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.732154 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg8bz\" (UniqueName: \"kubernetes.io/projected/f2489be1-e5c1-453d-937c-bd05f4152075-kube-api-access-sg8bz\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.732231 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-catalog-content\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.732304 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-utilities\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.833570 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg8bz\" (UniqueName: \"kubernetes.io/projected/f2489be1-e5c1-453d-937c-bd05f4152075-kube-api-access-sg8bz\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.833618 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-catalog-content\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.833667 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-utilities\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.834100 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-utilities\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.834428 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-catalog-content\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.836296 4636 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ntv"] Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.841848 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.887129 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ntv"] Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.900087 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg8bz\" (UniqueName: \"kubernetes.io/projected/f2489be1-e5c1-453d-937c-bd05f4152075-kube-api-access-sg8bz\") pod \"community-operators-hq78w\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.937418 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tzqv\" (UniqueName: \"kubernetes.io/projected/6b79e796-49c2-4aeb-82be-e68f867201a1-kube-api-access-2tzqv\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.937809 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-catalog-content\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.937883 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-utilities\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:09 crc kubenswrapper[4636]: I1002 22:15:09.988041 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.039381 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tzqv\" (UniqueName: \"kubernetes.io/projected/6b79e796-49c2-4aeb-82be-e68f867201a1-kube-api-access-2tzqv\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.039477 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-catalog-content\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.039520 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-utilities\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.040046 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-utilities\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.040126 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-catalog-content\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.068480 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tzqv\" (UniqueName: \"kubernetes.io/projected/6b79e796-49c2-4aeb-82be-e68f867201a1-kube-api-access-2tzqv\") pod \"redhat-marketplace-l5ntv\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:10 crc kubenswrapper[4636]: I1002 22:15:10.190292 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:11 crc kubenswrapper[4636]: E1002 22:15:11.549673 4636 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 02 22:15:11 crc kubenswrapper[4636]: E1002 22:15:11.553453 4636 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8dtxc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
tempest-tests-tempest_openstack(388bd5ff-d88f-4ea6-83e0-0ae99fc188ab): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 22:15:11 crc kubenswrapper[4636]: E1002 22:15:11.554612 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.027600 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ntv"] Oct 02 22:15:12 crc kubenswrapper[4636]: W1002 22:15:12.111923 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd42ca824_f82c_40c1_b997_bcb3008c6c37.slice/crio-6313cf8beb3efb539c2c1a53499e639934da5d492a0863b5b5fd6a584de3a41b WatchSource:0}: Error finding container 6313cf8beb3efb539c2c1a53499e639934da5d492a0863b5b5fd6a584de3a41b: Status 404 returned error can't find the container with id 6313cf8beb3efb539c2c1a53499e639934da5d492a0863b5b5fd6a584de3a41b Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.126776 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq78w"] Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.145030 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc"] Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.305030 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerStarted","Data":"6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b"} Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.305068 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerStarted","Data":"dc3fda6f9dd92e7f45edaa9c01e28d1b45f7233e7cbd0b7cbdf5cd5a650b4411"} Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.308680 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" event={"ID":"d42ca824-f82c-40c1-b997-bcb3008c6c37","Type":"ContainerStarted","Data":"8968a51fd6a48585fdfb27d386292d96a79693e0f861c531a60040cf1d2a7457"} Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.308731 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" event={"ID":"d42ca824-f82c-40c1-b997-bcb3008c6c37","Type":"ContainerStarted","Data":"6313cf8beb3efb539c2c1a53499e639934da5d492a0863b5b5fd6a584de3a41b"} Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.310684 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerStarted","Data":"6a516a7a81c07e67ce0874ae6dbdccc0dd8807ab75a4bf31b9aa46eec2dcb8a2"} Oct 02 22:15:12 crc kubenswrapper[4636]: I1002 22:15:12.310707 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" 
event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerStarted","Data":"8bcddb5b05c51f628ee894bd03dbe33b68016493e2da1ac8a3eb54a737c5f4eb"} Oct 02 22:15:12 crc kubenswrapper[4636]: E1002 22:15:12.312081 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.324528 4636 generic.go:334] "Generic (PLEG): container finished" podID="f2489be1-e5c1-453d-937c-bd05f4152075" containerID="6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b" exitCode=0 Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.324584 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerDied","Data":"6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b"} Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.326619 4636 generic.go:334] "Generic (PLEG): container finished" podID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerID="6a516a7a81c07e67ce0874ae6dbdccc0dd8807ab75a4bf31b9aa46eec2dcb8a2" exitCode=0 Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.326678 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerDied","Data":"6a516a7a81c07e67ce0874ae6dbdccc0dd8807ab75a4bf31b9aa46eec2dcb8a2"} Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.328315 4636 generic.go:334] "Generic (PLEG): container finished" podID="d42ca824-f82c-40c1-b997-bcb3008c6c37" containerID="8968a51fd6a48585fdfb27d386292d96a79693e0f861c531a60040cf1d2a7457" exitCode=0 Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.328350 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" event={"ID":"d42ca824-f82c-40c1-b997-bcb3008c6c37","Type":"ContainerDied","Data":"8968a51fd6a48585fdfb27d386292d96a79693e0f861c531a60040cf1d2a7457"} Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.329184 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:15:13 crc kubenswrapper[4636]: I1002 22:15:13.349368 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" podStartSLOduration=13.349351426 podStartE2EDuration="13.349351426s" podCreationTimestamp="2025-10-02 22:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 22:15:12.388089337 +0000 UTC m=+3103.711097356" watchObservedRunningTime="2025-10-02 22:15:13.349351426 +0000 UTC m=+3104.672359445" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.340290 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerStarted","Data":"6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776"} Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.344153 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerStarted","Data":"ba1b101e3ee0e550b5ca2a9a39c0b7559ffea67ff8d90731a6d3d13cce21ef54"} Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.604143 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:15:14 crc kubenswrapper[4636]: E1002 22:15:14.604821 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.686478 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.831738 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d42ca824-f82c-40c1-b997-bcb3008c6c37-secret-volume\") pod \"d42ca824-f82c-40c1-b997-bcb3008c6c37\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.832863 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d42ca824-f82c-40c1-b997-bcb3008c6c37-config-volume\") pod \"d42ca824-f82c-40c1-b997-bcb3008c6c37\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.833103 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6jxk\" (UniqueName: \"kubernetes.io/projected/d42ca824-f82c-40c1-b997-bcb3008c6c37-kube-api-access-c6jxk\") pod \"d42ca824-f82c-40c1-b997-bcb3008c6c37\" (UID: \"d42ca824-f82c-40c1-b997-bcb3008c6c37\") " Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.833645 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d42ca824-f82c-40c1-b997-bcb3008c6c37-config-volume" (OuterVolumeSpecName: "config-volume") pod "d42ca824-f82c-40c1-b997-bcb3008c6c37" (UID: "d42ca824-f82c-40c1-b997-bcb3008c6c37"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.834437 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d42ca824-f82c-40c1-b997-bcb3008c6c37-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.840265 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d42ca824-f82c-40c1-b997-bcb3008c6c37-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d42ca824-f82c-40c1-b997-bcb3008c6c37" (UID: "d42ca824-f82c-40c1-b997-bcb3008c6c37"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.843653 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d42ca824-f82c-40c1-b997-bcb3008c6c37-kube-api-access-c6jxk" (OuterVolumeSpecName: "kube-api-access-c6jxk") pod "d42ca824-f82c-40c1-b997-bcb3008c6c37" (UID: "d42ca824-f82c-40c1-b997-bcb3008c6c37"). InnerVolumeSpecName "kube-api-access-c6jxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.936939 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d42ca824-f82c-40c1-b997-bcb3008c6c37-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:14 crc kubenswrapper[4636]: I1002 22:15:14.936970 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6jxk\" (UniqueName: \"kubernetes.io/projected/d42ca824-f82c-40c1-b997-bcb3008c6c37-kube-api-access-c6jxk\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.167408 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph"] Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.208276 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324010-cmdph"] Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.353588 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" event={"ID":"d42ca824-f82c-40c1-b997-bcb3008c6c37","Type":"ContainerDied","Data":"6313cf8beb3efb539c2c1a53499e639934da5d492a0863b5b5fd6a584de3a41b"} Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.353651 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6313cf8beb3efb539c2c1a53499e639934da5d492a0863b5b5fd6a584de3a41b" Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.353743 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324055-fl4gc" Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.357168 4636 generic.go:334] "Generic (PLEG): container finished" podID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerID="ba1b101e3ee0e550b5ca2a9a39c0b7559ffea67ff8d90731a6d3d13cce21ef54" exitCode=0 Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.357254 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerDied","Data":"ba1b101e3ee0e550b5ca2a9a39c0b7559ffea67ff8d90731a6d3d13cce21ef54"} Oct 02 22:15:15 crc kubenswrapper[4636]: I1002 22:15:15.618681 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90a13ad5-0870-486c-9af3-c172240ee976" path="/var/lib/kubelet/pods/90a13ad5-0870-486c-9af3-c172240ee976/volumes" Oct 02 22:15:16 crc kubenswrapper[4636]: I1002 22:15:16.366025 4636 generic.go:334] "Generic (PLEG): container finished" podID="f2489be1-e5c1-453d-937c-bd05f4152075" containerID="6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776" exitCode=0 Oct 02 22:15:16 crc kubenswrapper[4636]: I1002 22:15:16.366103 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerDied","Data":"6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776"} Oct 02 22:15:16 crc kubenswrapper[4636]: I1002 22:15:16.371989 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerStarted","Data":"7d90dcfd452a8e130c5e07e7dee41d16b3094811890763a9884281b1e5268638"} Oct 02 22:15:16 crc kubenswrapper[4636]: I1002 22:15:16.409268 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l5ntv" podStartSLOduration=4.6993637249999995 podStartE2EDuration="7.409249386s" podCreationTimestamp="2025-10-02 22:15:09 +0000 UTC" firstStartedPulling="2025-10-02 22:15:13.328955619 +0000 UTC m=+3104.651963638" lastFinishedPulling="2025-10-02 22:15:16.03884128 +0000 UTC m=+3107.361849299" observedRunningTime="2025-10-02 22:15:16.406937044 +0000 UTC m=+3107.729945063" watchObservedRunningTime="2025-10-02 22:15:16.409249386 +0000 UTC m=+3107.732257405" Oct 02 22:15:17 crc kubenswrapper[4636]: I1002 22:15:17.384016 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerStarted","Data":"1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e"} Oct 02 22:15:17 crc kubenswrapper[4636]: I1002 22:15:17.407123 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hq78w" podStartSLOduration=4.682851473 podStartE2EDuration="8.407105736s" podCreationTimestamp="2025-10-02 22:15:09 +0000 UTC" firstStartedPulling="2025-10-02 22:15:13.329058772 +0000 UTC m=+3104.652066821" lastFinishedPulling="2025-10-02 22:15:17.053313065 +0000 UTC m=+3108.376321084" observedRunningTime="2025-10-02 22:15:17.404767413 +0000 UTC m=+3108.727775432" watchObservedRunningTime="2025-10-02 22:15:17.407105736 +0000 UTC m=+3108.730113755" Oct 02 22:15:19 crc kubenswrapper[4636]: I1002 22:15:19.989125 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:19 crc kubenswrapper[4636]: I1002 22:15:19.989501 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:20 crc kubenswrapper[4636]: I1002 22:15:20.040681 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:20 crc kubenswrapper[4636]: I1002 22:15:20.191417 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:20 crc kubenswrapper[4636]: I1002 22:15:20.191481 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:20 crc kubenswrapper[4636]: I1002 22:15:20.233200 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:25 crc kubenswrapper[4636]: I1002 22:15:25.471387 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab","Type":"ContainerStarted","Data":"ac56fb672c2604ea33a7b9b2768555e0e046aeaf677cfb13f5fe159415413629"} Oct 02 22:15:25 crc kubenswrapper[4636]: I1002 22:15:25.498626 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.661181968 podStartE2EDuration="43.498607562s" podCreationTimestamp="2025-10-02 22:14:42 +0000 UTC" firstStartedPulling="2025-10-02 22:14:44.208771196 +0000 UTC m=+3075.531779215" lastFinishedPulling="2025-10-02 22:15:24.04619677 +0000 UTC m=+3115.369204809" observedRunningTime="2025-10-02 22:15:25.49591536 +0000 UTC m=+3116.818923379" watchObservedRunningTime="2025-10-02 22:15:25.498607562 +0000 UTC m=+3116.821615581" Oct 02 22:15:25 crc kubenswrapper[4636]: I1002 22:15:25.603855 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:15:25 crc kubenswrapper[4636]: E1002 22:15:25.604163 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:15:30 crc kubenswrapper[4636]: I1002 22:15:30.047080 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:30 crc kubenswrapper[4636]: I1002 22:15:30.093945 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hq78w"] Oct 02 22:15:30 crc kubenswrapper[4636]: I1002 22:15:30.250578 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:30 crc kubenswrapper[4636]: I1002 22:15:30.531240 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hq78w" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="registry-server" containerID="cri-o://1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e" gracePeriod=2 Oct 02 22:15:30 
crc kubenswrapper[4636]: I1002 22:15:30.964903 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.029511 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-utilities\") pod \"f2489be1-e5c1-453d-937c-bd05f4152075\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.029608 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-catalog-content\") pod \"f2489be1-e5c1-453d-937c-bd05f4152075\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.029697 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg8bz\" (UniqueName: \"kubernetes.io/projected/f2489be1-e5c1-453d-937c-bd05f4152075-kube-api-access-sg8bz\") pod \"f2489be1-e5c1-453d-937c-bd05f4152075\" (UID: \"f2489be1-e5c1-453d-937c-bd05f4152075\") " Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.031131 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-utilities" (OuterVolumeSpecName: "utilities") pod "f2489be1-e5c1-453d-937c-bd05f4152075" (UID: "f2489be1-e5c1-453d-937c-bd05f4152075"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.045048 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2489be1-e5c1-453d-937c-bd05f4152075-kube-api-access-sg8bz" (OuterVolumeSpecName: "kube-api-access-sg8bz") pod "f2489be1-e5c1-453d-937c-bd05f4152075" (UID: "f2489be1-e5c1-453d-937c-bd05f4152075"). InnerVolumeSpecName "kube-api-access-sg8bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.102497 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f2489be1-e5c1-453d-937c-bd05f4152075" (UID: "f2489be1-e5c1-453d-937c-bd05f4152075"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.136082 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg8bz\" (UniqueName: \"kubernetes.io/projected/f2489be1-e5c1-453d-937c-bd05f4152075-kube-api-access-sg8bz\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.136115 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.136124 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2489be1-e5c1-453d-937c-bd05f4152075-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.555641 4636 generic.go:334] "Generic (PLEG): container finished" podID="f2489be1-e5c1-453d-937c-bd05f4152075" containerID="1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e" exitCode=0 Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.555852 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hq78w" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.555908 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerDied","Data":"1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e"} Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.560868 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq78w" event={"ID":"f2489be1-e5c1-453d-937c-bd05f4152075","Type":"ContainerDied","Data":"dc3fda6f9dd92e7f45edaa9c01e28d1b45f7233e7cbd0b7cbdf5cd5a650b4411"} Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.561371 4636 scope.go:117] "RemoveContainer" containerID="1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.583922 4636 scope.go:117] "RemoveContainer" containerID="6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.617052 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hq78w"] Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.619236 4636 scope.go:117] "RemoveContainer" containerID="6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.624608 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hq78w"] Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.652591 4636 scope.go:117] "RemoveContainer" containerID="1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e" Oct 02 22:15:31 crc kubenswrapper[4636]: E1002 22:15:31.653647 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e\": container with ID starting with 1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e not found: ID does not exist" containerID="1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.653699 
4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e"} err="failed to get container status \"1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e\": rpc error: code = NotFound desc = could not find container \"1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e\": container with ID starting with 1c5bf002e43b2ec39ab5ae5e9b1db00ec2fbfbb45543f852f4297b940ff8522e not found: ID does not exist" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.653730 4636 scope.go:117] "RemoveContainer" containerID="6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776" Oct 02 22:15:31 crc kubenswrapper[4636]: E1002 22:15:31.654279 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776\": container with ID starting with 6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776 not found: ID does not exist" containerID="6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.654313 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776"} err="failed to get container status \"6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776\": rpc error: code = NotFound desc = could not find container \"6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776\": container with ID starting with 6e819e6266c337c8b86add7dd815f351f40bfb9f3ac6e677ee76883092780776 not found: ID does not exist" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.654335 4636 scope.go:117] "RemoveContainer" containerID="6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b" Oct 02 22:15:31 crc kubenswrapper[4636]: E1002 22:15:31.654813 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b\": container with ID starting with 6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b not found: ID does not exist" containerID="6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b" Oct 02 22:15:31 crc kubenswrapper[4636]: I1002 22:15:31.654839 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b"} err="failed to get container status \"6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b\": rpc error: code = NotFound desc = could not find container \"6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b\": container with ID starting with 6d646fa1af257ab961f48ac32a03f905517f3852c5e5c39816e2464b16adb52b not found: ID does not exist" Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.290947 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ntv"] Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.292081 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l5ntv" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="registry-server" containerID="cri-o://7d90dcfd452a8e130c5e07e7dee41d16b3094811890763a9884281b1e5268638" gracePeriod=2 Oct 02 
22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.565542 4636 generic.go:334] "Generic (PLEG): container finished" podID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerID="7d90dcfd452a8e130c5e07e7dee41d16b3094811890763a9884281b1e5268638" exitCode=0 Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.565860 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerDied","Data":"7d90dcfd452a8e130c5e07e7dee41d16b3094811890763a9884281b1e5268638"} Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.878617 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.977981 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-catalog-content\") pod \"6b79e796-49c2-4aeb-82be-e68f867201a1\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.978243 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tzqv\" (UniqueName: \"kubernetes.io/projected/6b79e796-49c2-4aeb-82be-e68f867201a1-kube-api-access-2tzqv\") pod \"6b79e796-49c2-4aeb-82be-e68f867201a1\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.978311 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-utilities\") pod \"6b79e796-49c2-4aeb-82be-e68f867201a1\" (UID: \"6b79e796-49c2-4aeb-82be-e68f867201a1\") " Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.978916 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-utilities" (OuterVolumeSpecName: "utilities") pod "6b79e796-49c2-4aeb-82be-e68f867201a1" (UID: "6b79e796-49c2-4aeb-82be-e68f867201a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.992500 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b79e796-49c2-4aeb-82be-e68f867201a1" (UID: "6b79e796-49c2-4aeb-82be-e68f867201a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:15:32 crc kubenswrapper[4636]: I1002 22:15:32.992950 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b79e796-49c2-4aeb-82be-e68f867201a1-kube-api-access-2tzqv" (OuterVolumeSpecName: "kube-api-access-2tzqv") pod "6b79e796-49c2-4aeb-82be-e68f867201a1" (UID: "6b79e796-49c2-4aeb-82be-e68f867201a1"). InnerVolumeSpecName "kube-api-access-2tzqv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.080413 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tzqv\" (UniqueName: \"kubernetes.io/projected/6b79e796-49c2-4aeb-82be-e68f867201a1-kube-api-access-2tzqv\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.080445 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.080456 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b79e796-49c2-4aeb-82be-e68f867201a1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.577798 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ntv" event={"ID":"6b79e796-49c2-4aeb-82be-e68f867201a1","Type":"ContainerDied","Data":"8bcddb5b05c51f628ee894bd03dbe33b68016493e2da1ac8a3eb54a737c5f4eb"} Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.577838 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ntv" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.577855 4636 scope.go:117] "RemoveContainer" containerID="7d90dcfd452a8e130c5e07e7dee41d16b3094811890763a9884281b1e5268638" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.614582 4636 scope.go:117] "RemoveContainer" containerID="ba1b101e3ee0e550b5ca2a9a39c0b7559ffea67ff8d90731a6d3d13cce21ef54" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.622149 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" path="/var/lib/kubelet/pods/f2489be1-e5c1-453d-937c-bd05f4152075/volumes" Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.622737 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ntv"] Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.626159 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ntv"] Oct 02 22:15:33 crc kubenswrapper[4636]: I1002 22:15:33.640059 4636 scope.go:117] "RemoveContainer" containerID="6a516a7a81c07e67ce0874ae6dbdccc0dd8807ab75a4bf31b9aa46eec2dcb8a2" Oct 02 22:15:35 crc kubenswrapper[4636]: I1002 22:15:35.617875 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" path="/var/lib/kubelet/pods/6b79e796-49c2-4aeb-82be-e68f867201a1/volumes" Oct 02 22:15:37 crc kubenswrapper[4636]: I1002 22:15:37.604139 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:15:37 crc kubenswrapper[4636]: E1002 22:15:37.604766 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:15:50 crc kubenswrapper[4636]: I1002 22:15:50.603712 4636 scope.go:117] "RemoveContainer" 
containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:15:50 crc kubenswrapper[4636]: E1002 22:15:50.604468 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:16:04 crc kubenswrapper[4636]: I1002 22:16:04.604186 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:16:04 crc kubenswrapper[4636]: E1002 22:16:04.605054 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:16:06 crc kubenswrapper[4636]: I1002 22:16:06.348162 4636 scope.go:117] "RemoveContainer" containerID="df50aed8bd4d5301560977d894d74087273953e8e2d8a4f9e31b5f2f3dac168f" Oct 02 22:16:16 crc kubenswrapper[4636]: I1002 22:16:16.603849 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:16:16 crc kubenswrapper[4636]: E1002 22:16:16.604594 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:16:29 crc kubenswrapper[4636]: I1002 22:16:29.612124 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:16:30 crc kubenswrapper[4636]: I1002 22:16:30.064685 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"9bb640c19c9cb62bec88c84ce4e3f14b019a1967ad0b56a061a073660d8e6253"} Oct 02 22:18:53 crc kubenswrapper[4636]: I1002 22:18:53.117316 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:18:53 crc kubenswrapper[4636]: I1002 22:18:53.117979 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:19:23 crc kubenswrapper[4636]: I1002 22:19:23.117731 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:19:23 crc kubenswrapper[4636]: I1002 22:19:23.118409 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.117278 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.119183 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.119331 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.120244 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9bb640c19c9cb62bec88c84ce4e3f14b019a1967ad0b56a061a073660d8e6253"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.120429 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://9bb640c19c9cb62bec88c84ce4e3f14b019a1967ad0b56a061a073660d8e6253" gracePeriod=600 Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.959292 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="9bb640c19c9cb62bec88c84ce4e3f14b019a1967ad0b56a061a073660d8e6253" exitCode=0 Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.959387 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"9bb640c19c9cb62bec88c84ce4e3f14b019a1967ad0b56a061a073660d8e6253"} Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.959947 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890"} Oct 02 22:19:53 crc kubenswrapper[4636]: I1002 22:19:53.959981 4636 scope.go:117] "RemoveContainer" containerID="7c712488910116436b64332133a581ed79a473de2fdd72e808b5637bf5913ab5" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.715452 4636 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2qnhf"] Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.717942 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="registry-server" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.717957 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="registry-server" Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.717986 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="extract-content" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.717992 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="extract-content" Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.718004 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="registry-server" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718010 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="registry-server" Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.718021 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="extract-utilities" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718027 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="extract-utilities" Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.718051 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="extract-utilities" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718059 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="extract-utilities" Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.718074 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d42ca824-f82c-40c1-b997-bcb3008c6c37" containerName="collect-profiles" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718081 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="d42ca824-f82c-40c1-b997-bcb3008c6c37" containerName="collect-profiles" Oct 02 22:20:34 crc kubenswrapper[4636]: E1002 22:20:34.718100 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="extract-content" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718108 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="extract-content" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718298 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2489be1-e5c1-453d-937c-bd05f4152075" containerName="registry-server" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718311 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="d42ca824-f82c-40c1-b997-bcb3008c6c37" containerName="collect-profiles" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.718333 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b79e796-49c2-4aeb-82be-e68f867201a1" containerName="registry-server" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 
22:20:34.719653 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.753046 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qnhf"] Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.807703 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbx4v\" (UniqueName: \"kubernetes.io/projected/1eabb08c-df05-4a48-84e2-7375ee223483-kube-api-access-gbx4v\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.807833 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-utilities\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.807925 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-catalog-content\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.909283 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-utilities\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.909373 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-catalog-content\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.909505 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbx4v\" (UniqueName: \"kubernetes.io/projected/1eabb08c-df05-4a48-84e2-7375ee223483-kube-api-access-gbx4v\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.910365 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-utilities\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.910458 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-catalog-content\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:34 crc kubenswrapper[4636]: I1002 22:20:34.938790 4636 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbx4v\" (UniqueName: \"kubernetes.io/projected/1eabb08c-df05-4a48-84e2-7375ee223483-kube-api-access-gbx4v\") pod \"redhat-operators-2qnhf\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:35 crc kubenswrapper[4636]: I1002 22:20:35.040829 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:35 crc kubenswrapper[4636]: I1002 22:20:35.575713 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qnhf"] Oct 02 22:20:36 crc kubenswrapper[4636]: I1002 22:20:36.361182 4636 generic.go:334] "Generic (PLEG): container finished" podID="1eabb08c-df05-4a48-84e2-7375ee223483" containerID="4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975" exitCode=0 Oct 02 22:20:36 crc kubenswrapper[4636]: I1002 22:20:36.361223 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerDied","Data":"4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975"} Oct 02 22:20:36 crc kubenswrapper[4636]: I1002 22:20:36.361564 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerStarted","Data":"3a038f7b3b6f3dde395d392510869274fde3832577e3db3a9e8aba50b5104ecd"} Oct 02 22:20:36 crc kubenswrapper[4636]: I1002 22:20:36.364561 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:20:38 crc kubenswrapper[4636]: I1002 22:20:38.380727 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerStarted","Data":"ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43"} Oct 02 22:20:40 crc kubenswrapper[4636]: I1002 22:20:40.404569 4636 generic.go:334] "Generic (PLEG): container finished" podID="1eabb08c-df05-4a48-84e2-7375ee223483" containerID="ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43" exitCode=0 Oct 02 22:20:40 crc kubenswrapper[4636]: I1002 22:20:40.404954 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerDied","Data":"ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43"} Oct 02 22:20:41 crc kubenswrapper[4636]: I1002 22:20:41.415700 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerStarted","Data":"ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4"} Oct 02 22:20:41 crc kubenswrapper[4636]: I1002 22:20:41.466212 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2qnhf" podStartSLOduration=2.9636019510000002 podStartE2EDuration="7.466188449s" podCreationTimestamp="2025-10-02 22:20:34 +0000 UTC" firstStartedPulling="2025-10-02 22:20:36.36411523 +0000 UTC m=+3427.687123289" lastFinishedPulling="2025-10-02 22:20:40.866701778 +0000 UTC m=+3432.189709787" observedRunningTime="2025-10-02 22:20:41.43863418 +0000 UTC m=+3432.761642199" watchObservedRunningTime="2025-10-02 
22:20:41.466188449 +0000 UTC m=+3432.789196638" Oct 02 22:20:45 crc kubenswrapper[4636]: I1002 22:20:45.041983 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:45 crc kubenswrapper[4636]: I1002 22:20:45.042527 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:46 crc kubenswrapper[4636]: I1002 22:20:46.096369 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2qnhf" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="registry-server" probeResult="failure" output=< Oct 02 22:20:46 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 22:20:46 crc kubenswrapper[4636]: > Oct 02 22:20:55 crc kubenswrapper[4636]: I1002 22:20:55.153684 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:55 crc kubenswrapper[4636]: I1002 22:20:55.214620 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:55 crc kubenswrapper[4636]: I1002 22:20:55.402137 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qnhf"] Oct 02 22:20:56 crc kubenswrapper[4636]: I1002 22:20:56.557774 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2qnhf" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="registry-server" containerID="cri-o://ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4" gracePeriod=2 Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.151233 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.293019 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbx4v\" (UniqueName: \"kubernetes.io/projected/1eabb08c-df05-4a48-84e2-7375ee223483-kube-api-access-gbx4v\") pod \"1eabb08c-df05-4a48-84e2-7375ee223483\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.293373 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-catalog-content\") pod \"1eabb08c-df05-4a48-84e2-7375ee223483\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.293478 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-utilities\") pod \"1eabb08c-df05-4a48-84e2-7375ee223483\" (UID: \"1eabb08c-df05-4a48-84e2-7375ee223483\") " Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.294660 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-utilities" (OuterVolumeSpecName: "utilities") pod "1eabb08c-df05-4a48-84e2-7375ee223483" (UID: "1eabb08c-df05-4a48-84e2-7375ee223483"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.304249 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eabb08c-df05-4a48-84e2-7375ee223483-kube-api-access-gbx4v" (OuterVolumeSpecName: "kube-api-access-gbx4v") pod "1eabb08c-df05-4a48-84e2-7375ee223483" (UID: "1eabb08c-df05-4a48-84e2-7375ee223483"). InnerVolumeSpecName "kube-api-access-gbx4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.380719 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1eabb08c-df05-4a48-84e2-7375ee223483" (UID: "1eabb08c-df05-4a48-84e2-7375ee223483"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.395568 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.395612 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eabb08c-df05-4a48-84e2-7375ee223483-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.395623 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbx4v\" (UniqueName: \"kubernetes.io/projected/1eabb08c-df05-4a48-84e2-7375ee223483-kube-api-access-gbx4v\") on node \"crc\" DevicePath \"\"" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.568314 4636 generic.go:334] "Generic (PLEG): container finished" podID="1eabb08c-df05-4a48-84e2-7375ee223483" containerID="ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4" exitCode=0 Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.568359 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerDied","Data":"ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4"} Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.568372 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2qnhf" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.568391 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qnhf" event={"ID":"1eabb08c-df05-4a48-84e2-7375ee223483","Type":"ContainerDied","Data":"3a038f7b3b6f3dde395d392510869274fde3832577e3db3a9e8aba50b5104ecd"} Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.568416 4636 scope.go:117] "RemoveContainer" containerID="ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.588804 4636 scope.go:117] "RemoveContainer" containerID="ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.600523 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qnhf"] Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.622406 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2qnhf"] Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.631323 4636 scope.go:117] "RemoveContainer" containerID="4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.656970 4636 scope.go:117] "RemoveContainer" containerID="ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4" Oct 02 22:20:57 crc kubenswrapper[4636]: E1002 22:20:57.657468 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4\": container with ID starting with ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4 not found: ID does not exist" containerID="ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.657503 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4"} err="failed to get container status \"ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4\": rpc error: code = NotFound desc = could not find container \"ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4\": container with ID starting with ab6f4d87c090b3a488689d0d00c00c0963aa0fb7a52fb48d83ceaceeb0b9c2b4 not found: ID does not exist" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.657528 4636 scope.go:117] "RemoveContainer" containerID="ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43" Oct 02 22:20:57 crc kubenswrapper[4636]: E1002 22:20:57.658517 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43\": container with ID starting with ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43 not found: ID does not exist" containerID="ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.658559 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43"} err="failed to get container status \"ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43\": rpc error: code = NotFound desc = could not find container 
\"ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43\": container with ID starting with ea88052bb390dc1d1176b7dec6f4ad1d6ee6d7632eec913628671c932109dd43 not found: ID does not exist" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.658587 4636 scope.go:117] "RemoveContainer" containerID="4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975" Oct 02 22:20:57 crc kubenswrapper[4636]: E1002 22:20:57.658902 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975\": container with ID starting with 4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975 not found: ID does not exist" containerID="4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975" Oct 02 22:20:57 crc kubenswrapper[4636]: I1002 22:20:57.658930 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975"} err="failed to get container status \"4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975\": rpc error: code = NotFound desc = could not find container \"4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975\": container with ID starting with 4fd1560f1a44fae60312da73d87a44fa1f3af5ba03f8494fe29a640b6f88e975 not found: ID does not exist" Oct 02 22:20:59 crc kubenswrapper[4636]: I1002 22:20:59.613781 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" path="/var/lib/kubelet/pods/1eabb08c-df05-4a48-84e2-7375ee223483/volumes" Oct 02 22:21:53 crc kubenswrapper[4636]: I1002 22:21:53.117295 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:21:53 crc kubenswrapper[4636]: I1002 22:21:53.118703 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:22:23 crc kubenswrapper[4636]: I1002 22:22:23.117428 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:22:23 crc kubenswrapper[4636]: I1002 22:22:23.117955 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.117418 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:22:53 crc 
kubenswrapper[4636]: I1002 22:22:53.118063 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.118121 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.118984 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.119054 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" gracePeriod=600 Oct 02 22:22:53 crc kubenswrapper[4636]: E1002 22:22:53.261300 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.733934 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" exitCode=0 Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.734023 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890"} Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.734094 4636 scope.go:117] "RemoveContainer" containerID="9bb640c19c9cb62bec88c84ce4e3f14b019a1967ad0b56a061a073660d8e6253" Oct 02 22:22:53 crc kubenswrapper[4636]: I1002 22:22:53.735234 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:22:53 crc kubenswrapper[4636]: E1002 22:22:53.735778 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:23:06 crc kubenswrapper[4636]: I1002 22:23:06.603786 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:23:06 crc 
kubenswrapper[4636]: E1002 22:23:06.604586 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:23:19 crc kubenswrapper[4636]: I1002 22:23:19.610601 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:23:19 crc kubenswrapper[4636]: E1002 22:23:19.611371 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:23:33 crc kubenswrapper[4636]: I1002 22:23:33.605210 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:23:33 crc kubenswrapper[4636]: E1002 22:23:33.606104 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:23:47 crc kubenswrapper[4636]: I1002 22:23:47.604887 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:23:47 crc kubenswrapper[4636]: E1002 22:23:47.605999 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:23:59 crc kubenswrapper[4636]: I1002 22:23:59.606427 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:23:59 crc kubenswrapper[4636]: E1002 22:23:59.607221 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:24:10 crc kubenswrapper[4636]: I1002 22:24:10.603666 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:24:10 crc kubenswrapper[4636]: E1002 22:24:10.604355 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:24:23 crc kubenswrapper[4636]: I1002 22:24:23.604068 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:24:23 crc kubenswrapper[4636]: E1002 22:24:23.604851 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.367986 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9z2nw"] Oct 02 22:24:26 crc kubenswrapper[4636]: E1002 22:24:26.369066 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="registry-server" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.369083 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="registry-server" Oct 02 22:24:26 crc kubenswrapper[4636]: E1002 22:24:26.369097 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="extract-content" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.369105 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="extract-content" Oct 02 22:24:26 crc kubenswrapper[4636]: E1002 22:24:26.369118 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="extract-utilities" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.369126 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="extract-utilities" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.369354 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eabb08c-df05-4a48-84e2-7375ee223483" containerName="registry-server" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.371353 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.397477 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9z2nw"] Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.447231 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-utilities\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.447302 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wzlj\" (UniqueName: \"kubernetes.io/projected/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-kube-api-access-8wzlj\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.447349 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-catalog-content\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.549418 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-catalog-content\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.549572 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-utilities\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.549613 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wzlj\" (UniqueName: \"kubernetes.io/projected/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-kube-api-access-8wzlj\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.550081 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-catalog-content\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.550123 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-utilities\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.570789 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8wzlj\" (UniqueName: \"kubernetes.io/projected/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-kube-api-access-8wzlj\") pod \"certified-operators-9z2nw\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:26 crc kubenswrapper[4636]: I1002 22:24:26.701397 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:27 crc kubenswrapper[4636]: I1002 22:24:27.312932 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9z2nw"] Oct 02 22:24:27 crc kubenswrapper[4636]: I1002 22:24:27.562868 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerStarted","Data":"f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6"} Oct 02 22:24:27 crc kubenswrapper[4636]: I1002 22:24:27.563790 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerStarted","Data":"947454d3089ad29006df7a4ccb61c37aa5e04b94a5993c08195f5be39571cf5c"} Oct 02 22:24:28 crc kubenswrapper[4636]: I1002 22:24:28.572425 4636 generic.go:334] "Generic (PLEG): container finished" podID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerID="f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6" exitCode=0 Oct 02 22:24:28 crc kubenswrapper[4636]: I1002 22:24:28.572518 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerDied","Data":"f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6"} Oct 02 22:24:29 crc kubenswrapper[4636]: I1002 22:24:29.586064 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerStarted","Data":"c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8"} Oct 02 22:24:30 crc kubenswrapper[4636]: I1002 22:24:30.595939 4636 generic.go:334] "Generic (PLEG): container finished" podID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerID="c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8" exitCode=0 Oct 02 22:24:30 crc kubenswrapper[4636]: I1002 22:24:30.596023 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerDied","Data":"c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8"} Oct 02 22:24:31 crc kubenswrapper[4636]: I1002 22:24:31.613401 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerStarted","Data":"eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b"} Oct 02 22:24:31 crc kubenswrapper[4636]: I1002 22:24:31.629975 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9z2nw" podStartSLOduration=3.005066743 podStartE2EDuration="5.629952915s" podCreationTimestamp="2025-10-02 22:24:26 +0000 UTC" firstStartedPulling="2025-10-02 22:24:28.574918791 +0000 UTC m=+3659.897926830" lastFinishedPulling="2025-10-02 
22:24:31.199804983 +0000 UTC m=+3662.522813002" observedRunningTime="2025-10-02 22:24:31.623742849 +0000 UTC m=+3662.946750868" watchObservedRunningTime="2025-10-02 22:24:31.629952915 +0000 UTC m=+3662.952960944" Oct 02 22:24:36 crc kubenswrapper[4636]: I1002 22:24:36.701711 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:36 crc kubenswrapper[4636]: I1002 22:24:36.702118 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:36 crc kubenswrapper[4636]: I1002 22:24:36.759678 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:37 crc kubenswrapper[4636]: I1002 22:24:37.697109 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:37 crc kubenswrapper[4636]: I1002 22:24:37.747053 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9z2nw"] Oct 02 22:24:38 crc kubenswrapper[4636]: I1002 22:24:38.604204 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:24:38 crc kubenswrapper[4636]: E1002 22:24:38.604620 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:24:39 crc kubenswrapper[4636]: I1002 22:24:39.671914 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9z2nw" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="registry-server" containerID="cri-o://eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b" gracePeriod=2 Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.247835 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.416510 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wzlj\" (UniqueName: \"kubernetes.io/projected/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-kube-api-access-8wzlj\") pod \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.416560 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-catalog-content\") pod \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.416589 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-utilities\") pod \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\" (UID: \"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93\") " Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.417895 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-utilities" (OuterVolumeSpecName: "utilities") pod "b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" (UID: "b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.431572 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-kube-api-access-8wzlj" (OuterVolumeSpecName: "kube-api-access-8wzlj") pod "b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" (UID: "b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93"). InnerVolumeSpecName "kube-api-access-8wzlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.466000 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" (UID: "b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.518200 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wzlj\" (UniqueName: \"kubernetes.io/projected/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-kube-api-access-8wzlj\") on node \"crc\" DevicePath \"\"" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.518234 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.518249 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.682303 4636 generic.go:334] "Generic (PLEG): container finished" podID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerID="eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b" exitCode=0 Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.682345 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9z2nw" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.682371 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerDied","Data":"eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b"} Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.682430 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9z2nw" event={"ID":"b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93","Type":"ContainerDied","Data":"947454d3089ad29006df7a4ccb61c37aa5e04b94a5993c08195f5be39571cf5c"} Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.682452 4636 scope.go:117] "RemoveContainer" containerID="eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.707038 4636 scope.go:117] "RemoveContainer" containerID="c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.725474 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9z2nw"] Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.747979 4636 scope.go:117] "RemoveContainer" containerID="f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.748199 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9z2nw"] Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.777874 4636 scope.go:117] "RemoveContainer" containerID="eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b" Oct 02 22:24:40 crc kubenswrapper[4636]: E1002 22:24:40.778356 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b\": container with ID starting with eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b not found: ID does not exist" containerID="eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.778384 
4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b"} err="failed to get container status \"eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b\": rpc error: code = NotFound desc = could not find container \"eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b\": container with ID starting with eb6bf66099dd57b29b46f4012070fa6702d26834964db4fba13faa4299b95e0b not found: ID does not exist" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.778404 4636 scope.go:117] "RemoveContainer" containerID="c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8" Oct 02 22:24:40 crc kubenswrapper[4636]: E1002 22:24:40.778632 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8\": container with ID starting with c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8 not found: ID does not exist" containerID="c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.778656 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8"} err="failed to get container status \"c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8\": rpc error: code = NotFound desc = could not find container \"c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8\": container with ID starting with c5e8700852b3125c384c86eb7f57485dc5be8593a0a2bd38a759111d97bf0da8 not found: ID does not exist" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.778669 4636 scope.go:117] "RemoveContainer" containerID="f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6" Oct 02 22:24:40 crc kubenswrapper[4636]: E1002 22:24:40.779063 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6\": container with ID starting with f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6 not found: ID does not exist" containerID="f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6" Oct 02 22:24:40 crc kubenswrapper[4636]: I1002 22:24:40.779086 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6"} err="failed to get container status \"f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6\": rpc error: code = NotFound desc = could not find container \"f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6\": container with ID starting with f0e7cc8757f955d541d1ab43154016ed106d713e31e6f9b1309b03e4c7b38bc6 not found: ID does not exist" Oct 02 22:24:41 crc kubenswrapper[4636]: I1002 22:24:41.615187 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" path="/var/lib/kubelet/pods/b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93/volumes" Oct 02 22:24:50 crc kubenswrapper[4636]: I1002 22:24:50.604047 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:24:50 crc kubenswrapper[4636]: E1002 22:24:50.604884 4636 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:25:03 crc kubenswrapper[4636]: I1002 22:25:03.603628 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:25:03 crc kubenswrapper[4636]: E1002 22:25:03.605393 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:25:15 crc kubenswrapper[4636]: I1002 22:25:15.603627 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:25:15 crc kubenswrapper[4636]: E1002 22:25:15.604416 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:25:26 crc kubenswrapper[4636]: I1002 22:25:26.604294 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:25:26 crc kubenswrapper[4636]: E1002 22:25:26.605702 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.243407 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dsxvz"] Oct 02 22:25:32 crc kubenswrapper[4636]: E1002 22:25:32.244430 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="extract-utilities" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.244444 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="extract-utilities" Oct 02 22:25:32 crc kubenswrapper[4636]: E1002 22:25:32.244479 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="registry-server" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.244487 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="registry-server" Oct 02 22:25:32 crc kubenswrapper[4636]: E1002 22:25:32.244505 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" 
containerName="extract-content" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.244514 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="extract-content" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.244767 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b06fbcbd-86ff-4da0-84a2-ba8e9b5c2f93" containerName="registry-server" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.246544 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.252244 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsxvz"] Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.436782 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qxrl\" (UniqueName: \"kubernetes.io/projected/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-kube-api-access-6qxrl\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.437625 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-utilities\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.437890 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-catalog-content\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.539410 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qxrl\" (UniqueName: \"kubernetes.io/projected/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-kube-api-access-6qxrl\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.539508 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-utilities\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.539597 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-catalog-content\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.540095 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-utilities\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " 
pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.540132 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-catalog-content\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.559778 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qxrl\" (UniqueName: \"kubernetes.io/projected/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-kube-api-access-6qxrl\") pod \"community-operators-dsxvz\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:32 crc kubenswrapper[4636]: I1002 22:25:32.568493 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:33 crc kubenswrapper[4636]: I1002 22:25:33.170040 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsxvz"] Oct 02 22:25:33 crc kubenswrapper[4636]: I1002 22:25:33.230798 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerStarted","Data":"8e1bb94460c0f861b916d8df17a853725f7dc2ea1d08c673e5dc0401d9802eb4"} Oct 02 22:25:34 crc kubenswrapper[4636]: I1002 22:25:34.241228 4636 generic.go:334] "Generic (PLEG): container finished" podID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerID="393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a" exitCode=0 Oct 02 22:25:34 crc kubenswrapper[4636]: I1002 22:25:34.241284 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerDied","Data":"393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a"} Oct 02 22:25:36 crc kubenswrapper[4636]: I1002 22:25:36.264110 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerStarted","Data":"431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039"} Oct 02 22:25:38 crc kubenswrapper[4636]: I1002 22:25:38.286004 4636 generic.go:334] "Generic (PLEG): container finished" podID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerID="431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039" exitCode=0 Oct 02 22:25:38 crc kubenswrapper[4636]: I1002 22:25:38.286060 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerDied","Data":"431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039"} Oct 02 22:25:38 crc kubenswrapper[4636]: I1002 22:25:38.288903 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:25:41 crc kubenswrapper[4636]: I1002 22:25:41.603824 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:25:41 crc kubenswrapper[4636]: E1002 22:25:41.604565 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:25:43 crc kubenswrapper[4636]: I1002 22:25:43.343130 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerStarted","Data":"7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639"} Oct 02 22:25:43 crc kubenswrapper[4636]: I1002 22:25:43.372657 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dsxvz" podStartSLOduration=2.623645218 podStartE2EDuration="11.372630921s" podCreationTimestamp="2025-10-02 22:25:32 +0000 UTC" firstStartedPulling="2025-10-02 22:25:34.243026741 +0000 UTC m=+3725.566034760" lastFinishedPulling="2025-10-02 22:25:42.992012434 +0000 UTC m=+3734.315020463" observedRunningTime="2025-10-02 22:25:43.368013688 +0000 UTC m=+3734.691021727" watchObservedRunningTime="2025-10-02 22:25:43.372630921 +0000 UTC m=+3734.695638940" Oct 02 22:25:52 crc kubenswrapper[4636]: I1002 22:25:52.569123 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:52 crc kubenswrapper[4636]: I1002 22:25:52.569652 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:52 crc kubenswrapper[4636]: I1002 22:25:52.641445 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:53 crc kubenswrapper[4636]: I1002 22:25:53.512588 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:53 crc kubenswrapper[4636]: I1002 22:25:53.558803 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dsxvz"] Oct 02 22:25:53 crc kubenswrapper[4636]: I1002 22:25:53.604056 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:25:53 crc kubenswrapper[4636]: E1002 22:25:53.604358 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:25:55 crc kubenswrapper[4636]: I1002 22:25:55.479779 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dsxvz" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="registry-server" containerID="cri-o://7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639" gracePeriod=2 Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.142142 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.240270 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-catalog-content\") pod \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.240363 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qxrl\" (UniqueName: \"kubernetes.io/projected/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-kube-api-access-6qxrl\") pod \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.240569 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-utilities\") pod \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\" (UID: \"41495ec4-0db6-44dc-ad6d-5fd3858e4a08\") " Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.241482 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-utilities" (OuterVolumeSpecName: "utilities") pod "41495ec4-0db6-44dc-ad6d-5fd3858e4a08" (UID: "41495ec4-0db6-44dc-ad6d-5fd3858e4a08"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.247779 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-kube-api-access-6qxrl" (OuterVolumeSpecName: "kube-api-access-6qxrl") pod "41495ec4-0db6-44dc-ad6d-5fd3858e4a08" (UID: "41495ec4-0db6-44dc-ad6d-5fd3858e4a08"). InnerVolumeSpecName "kube-api-access-6qxrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.292680 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41495ec4-0db6-44dc-ad6d-5fd3858e4a08" (UID: "41495ec4-0db6-44dc-ad6d-5fd3858e4a08"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.342673 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.342705 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qxrl\" (UniqueName: \"kubernetes.io/projected/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-kube-api-access-6qxrl\") on node \"crc\" DevicePath \"\"" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.342717 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41495ec4-0db6-44dc-ad6d-5fd3858e4a08-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.492614 4636 generic.go:334] "Generic (PLEG): container finished" podID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerID="7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639" exitCode=0 Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.492666 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerDied","Data":"7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639"} Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.493727 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsxvz" event={"ID":"41495ec4-0db6-44dc-ad6d-5fd3858e4a08","Type":"ContainerDied","Data":"8e1bb94460c0f861b916d8df17a853725f7dc2ea1d08c673e5dc0401d9802eb4"} Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.492693 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dsxvz" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.493773 4636 scope.go:117] "RemoveContainer" containerID="7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.522448 4636 scope.go:117] "RemoveContainer" containerID="431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.543956 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dsxvz"] Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.552505 4636 scope.go:117] "RemoveContainer" containerID="393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.557377 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dsxvz"] Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.586068 4636 scope.go:117] "RemoveContainer" containerID="7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639" Oct 02 22:25:56 crc kubenswrapper[4636]: E1002 22:25:56.586586 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639\": container with ID starting with 7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639 not found: ID does not exist" containerID="7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.586623 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639"} err="failed to get container status \"7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639\": rpc error: code = NotFound desc = could not find container \"7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639\": container with ID starting with 7225b164d84ae806aebcab91291717b3bd8cbbcad85f2e4bed334bbd36f31639 not found: ID does not exist" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.586647 4636 scope.go:117] "RemoveContainer" containerID="431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039" Oct 02 22:25:56 crc kubenswrapper[4636]: E1002 22:25:56.586988 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039\": container with ID starting with 431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039 not found: ID does not exist" containerID="431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.587094 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039"} err="failed to get container status \"431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039\": rpc error: code = NotFound desc = could not find container \"431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039\": container with ID starting with 431bb5df6517d2c626aaeddd4bd54aab303362a0f25c9ee26f210807961bf039 not found: ID does not exist" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.587193 4636 scope.go:117] "RemoveContainer" 
containerID="393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a" Oct 02 22:25:56 crc kubenswrapper[4636]: E1002 22:25:56.589359 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a\": container with ID starting with 393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a not found: ID does not exist" containerID="393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a" Oct 02 22:25:56 crc kubenswrapper[4636]: I1002 22:25:56.589536 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a"} err="failed to get container status \"393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a\": rpc error: code = NotFound desc = could not find container \"393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a\": container with ID starting with 393925871c284dd1835dcdcc4ae4d74f23243891ff09acaeaba5420c0a9eee4a not found: ID does not exist" Oct 02 22:25:57 crc kubenswrapper[4636]: I1002 22:25:57.616007 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" path="/var/lib/kubelet/pods/41495ec4-0db6-44dc-ad6d-5fd3858e4a08/volumes" Oct 02 22:26:08 crc kubenswrapper[4636]: I1002 22:26:08.603427 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:26:08 crc kubenswrapper[4636]: E1002 22:26:08.604253 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.918795 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j5jxb"] Oct 02 22:26:17 crc kubenswrapper[4636]: E1002 22:26:17.919648 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="extract-utilities" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.919661 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="extract-utilities" Oct 02 22:26:17 crc kubenswrapper[4636]: E1002 22:26:17.919678 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="registry-server" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.919684 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="registry-server" Oct 02 22:26:17 crc kubenswrapper[4636]: E1002 22:26:17.919712 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="extract-content" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.919719 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="extract-content" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.919908 4636 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="41495ec4-0db6-44dc-ad6d-5fd3858e4a08" containerName="registry-server" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.921182 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.939076 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5jxb"] Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.972943 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfd27\" (UniqueName: \"kubernetes.io/projected/968dffbb-e83c-4cd9-b72f-a4478f952e48-kube-api-access-gfd27\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.973015 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-utilities\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:17 crc kubenswrapper[4636]: I1002 22:26:17.973145 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-catalog-content\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.075641 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfd27\" (UniqueName: \"kubernetes.io/projected/968dffbb-e83c-4cd9-b72f-a4478f952e48-kube-api-access-gfd27\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.075737 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-utilities\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.075815 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-catalog-content\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.076345 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-utilities\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.076390 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-catalog-content\") pod \"redhat-marketplace-j5jxb\" (UID: 
\"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.258116 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfd27\" (UniqueName: \"kubernetes.io/projected/968dffbb-e83c-4cd9-b72f-a4478f952e48-kube-api-access-gfd27\") pod \"redhat-marketplace-j5jxb\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:18 crc kubenswrapper[4636]: I1002 22:26:18.541070 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:19 crc kubenswrapper[4636]: I1002 22:26:19.057578 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5jxb"] Oct 02 22:26:19 crc kubenswrapper[4636]: I1002 22:26:19.699446 4636 generic.go:334] "Generic (PLEG): container finished" podID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerID="fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d" exitCode=0 Oct 02 22:26:19 crc kubenswrapper[4636]: I1002 22:26:19.699485 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerDied","Data":"fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d"} Oct 02 22:26:19 crc kubenswrapper[4636]: I1002 22:26:19.699778 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerStarted","Data":"af2038817d5eb9c9c8f4a9a2d084c22885595db8c8f975dc31a357f9bf5ad0a6"} Oct 02 22:26:21 crc kubenswrapper[4636]: I1002 22:26:21.604337 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:26:21 crc kubenswrapper[4636]: E1002 22:26:21.605108 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:26:21 crc kubenswrapper[4636]: I1002 22:26:21.731736 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerStarted","Data":"00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818"} Oct 02 22:26:22 crc kubenswrapper[4636]: I1002 22:26:22.741715 4636 generic.go:334] "Generic (PLEG): container finished" podID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerID="00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818" exitCode=0 Oct 02 22:26:22 crc kubenswrapper[4636]: I1002 22:26:22.741835 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerDied","Data":"00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818"} Oct 02 22:26:24 crc kubenswrapper[4636]: I1002 22:26:24.790466 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" 
event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerStarted","Data":"07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b"} Oct 02 22:26:24 crc kubenswrapper[4636]: I1002 22:26:24.813596 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j5jxb" podStartSLOduration=3.541645472 podStartE2EDuration="7.813575807s" podCreationTimestamp="2025-10-02 22:26:17 +0000 UTC" firstStartedPulling="2025-10-02 22:26:19.701733411 +0000 UTC m=+3771.024741430" lastFinishedPulling="2025-10-02 22:26:23.973663746 +0000 UTC m=+3775.296671765" observedRunningTime="2025-10-02 22:26:24.808215534 +0000 UTC m=+3776.131223553" watchObservedRunningTime="2025-10-02 22:26:24.813575807 +0000 UTC m=+3776.136583836" Oct 02 22:26:28 crc kubenswrapper[4636]: I1002 22:26:28.541628 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:28 crc kubenswrapper[4636]: I1002 22:26:28.541980 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:28 crc kubenswrapper[4636]: I1002 22:26:28.587316 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:34 crc kubenswrapper[4636]: I1002 22:26:34.604073 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:26:34 crc kubenswrapper[4636]: E1002 22:26:34.604898 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:26:38 crc kubenswrapper[4636]: I1002 22:26:38.587734 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:38 crc kubenswrapper[4636]: I1002 22:26:38.641731 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5jxb"] Oct 02 22:26:38 crc kubenswrapper[4636]: I1002 22:26:38.909290 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j5jxb" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="registry-server" containerID="cri-o://07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b" gracePeriod=2 Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.506144 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.579288 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-catalog-content\") pod \"968dffbb-e83c-4cd9-b72f-a4478f952e48\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.579418 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-utilities\") pod \"968dffbb-e83c-4cd9-b72f-a4478f952e48\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.579559 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfd27\" (UniqueName: \"kubernetes.io/projected/968dffbb-e83c-4cd9-b72f-a4478f952e48-kube-api-access-gfd27\") pod \"968dffbb-e83c-4cd9-b72f-a4478f952e48\" (UID: \"968dffbb-e83c-4cd9-b72f-a4478f952e48\") " Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.580416 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-utilities" (OuterVolumeSpecName: "utilities") pod "968dffbb-e83c-4cd9-b72f-a4478f952e48" (UID: "968dffbb-e83c-4cd9-b72f-a4478f952e48"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.588994 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/968dffbb-e83c-4cd9-b72f-a4478f952e48-kube-api-access-gfd27" (OuterVolumeSpecName: "kube-api-access-gfd27") pod "968dffbb-e83c-4cd9-b72f-a4478f952e48" (UID: "968dffbb-e83c-4cd9-b72f-a4478f952e48"). InnerVolumeSpecName "kube-api-access-gfd27". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.592558 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "968dffbb-e83c-4cd9-b72f-a4478f952e48" (UID: "968dffbb-e83c-4cd9-b72f-a4478f952e48"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.681321 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.681353 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfd27\" (UniqueName: \"kubernetes.io/projected/968dffbb-e83c-4cd9-b72f-a4478f952e48-kube-api-access-gfd27\") on node \"crc\" DevicePath \"\"" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.681363 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/968dffbb-e83c-4cd9-b72f-a4478f952e48-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.919644 4636 generic.go:334] "Generic (PLEG): container finished" podID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerID="07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b" exitCode=0 Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.919698 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j5jxb" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.919738 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerDied","Data":"07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b"} Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.920042 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j5jxb" event={"ID":"968dffbb-e83c-4cd9-b72f-a4478f952e48","Type":"ContainerDied","Data":"af2038817d5eb9c9c8f4a9a2d084c22885595db8c8f975dc31a357f9bf5ad0a6"} Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.920067 4636 scope.go:117] "RemoveContainer" containerID="07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.946941 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5jxb"] Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.947901 4636 scope.go:117] "RemoveContainer" containerID="00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818" Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.959381 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j5jxb"] Oct 02 22:26:39 crc kubenswrapper[4636]: I1002 22:26:39.992586 4636 scope.go:117] "RemoveContainer" containerID="fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d" Oct 02 22:26:40 crc kubenswrapper[4636]: I1002 22:26:40.022011 4636 scope.go:117] "RemoveContainer" containerID="07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b" Oct 02 22:26:40 crc kubenswrapper[4636]: E1002 22:26:40.024656 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b\": container with ID starting with 07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b not found: ID does not exist" containerID="07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b" Oct 02 22:26:40 crc kubenswrapper[4636]: I1002 22:26:40.024715 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b"} err="failed to get container status \"07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b\": rpc error: code = NotFound desc = could not find container \"07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b\": container with ID starting with 07c3298f3629c701709acba8d55421780c7fe37c3c8c262b619882ac6f8d6e8b not found: ID does not exist" Oct 02 22:26:40 crc kubenswrapper[4636]: I1002 22:26:40.024768 4636 scope.go:117] "RemoveContainer" containerID="00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818" Oct 02 22:26:40 crc kubenswrapper[4636]: E1002 22:26:40.025483 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818\": container with ID starting with 00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818 not found: ID does not exist" containerID="00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818" Oct 02 22:26:40 crc kubenswrapper[4636]: I1002 22:26:40.025515 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818"} err="failed to get container status \"00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818\": rpc error: code = NotFound desc = could not find container \"00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818\": container with ID starting with 00e3dbe35464f72d62d9753a4d535baefb5f14a888afbbf73c3b756fd25a1818 not found: ID does not exist" Oct 02 22:26:40 crc kubenswrapper[4636]: I1002 22:26:40.025537 4636 scope.go:117] "RemoveContainer" containerID="fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d" Oct 02 22:26:40 crc kubenswrapper[4636]: E1002 22:26:40.026950 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d\": container with ID starting with fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d not found: ID does not exist" containerID="fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d" Oct 02 22:26:40 crc kubenswrapper[4636]: I1002 22:26:40.026973 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d"} err="failed to get container status \"fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d\": rpc error: code = NotFound desc = could not find container \"fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d\": container with ID starting with fbd21e5e8a3a083789a8f99ef2fcd86e3bdb7548d2389bd24663ed815a9f0e9d not found: ID does not exist" Oct 02 22:26:41 crc kubenswrapper[4636]: I1002 22:26:41.614053 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" path="/var/lib/kubelet/pods/968dffbb-e83c-4cd9-b72f-a4478f952e48/volumes" Oct 02 22:26:49 crc kubenswrapper[4636]: I1002 22:26:49.603441 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:26:49 crc kubenswrapper[4636]: E1002 22:26:49.604834 4636 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:27:02 crc kubenswrapper[4636]: I1002 22:27:02.608543 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:27:02 crc kubenswrapper[4636]: E1002 22:27:02.609731 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:27:14 crc kubenswrapper[4636]: I1002 22:27:14.603614 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:27:14 crc kubenswrapper[4636]: E1002 22:27:14.604373 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:27:29 crc kubenswrapper[4636]: I1002 22:27:29.615327 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:27:29 crc kubenswrapper[4636]: E1002 22:27:29.616128 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:27:41 crc kubenswrapper[4636]: I1002 22:27:41.607067 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:27:41 crc kubenswrapper[4636]: E1002 22:27:41.609022 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:27:54 crc kubenswrapper[4636]: I1002 22:27:54.603979 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:27:55 crc kubenswrapper[4636]: I1002 22:27:55.580872 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" 
event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"8ad0499b3003e641638fb66ed89dcc75dcd9f4dbbc09197f42075aef7e5eaf14"} Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.142338 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p"] Oct 02 22:30:00 crc kubenswrapper[4636]: E1002 22:30:00.143250 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="extract-content" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.143267 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="extract-content" Oct 02 22:30:00 crc kubenswrapper[4636]: E1002 22:30:00.143280 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="registry-server" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.143288 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="registry-server" Oct 02 22:30:00 crc kubenswrapper[4636]: E1002 22:30:00.143299 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="extract-utilities" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.143307 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="extract-utilities" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.143586 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="968dffbb-e83c-4cd9-b72f-a4478f952e48" containerName="registry-server" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.144359 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.146861 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.147040 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.154882 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p"] Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.246886 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-secret-volume\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.246943 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67j82\" (UniqueName: \"kubernetes.io/projected/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-kube-api-access-67j82\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.247141 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-config-volume\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.349114 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-config-volume\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.349218 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-secret-volume\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.349281 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67j82\" (UniqueName: \"kubernetes.io/projected/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-kube-api-access-67j82\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.350393 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-config-volume\") pod 
\"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.359791 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-secret-volume\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.365741 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67j82\" (UniqueName: \"kubernetes.io/projected/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-kube-api-access-67j82\") pod \"collect-profiles-29324070-5nv2p\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.483257 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:00 crc kubenswrapper[4636]: I1002 22:30:00.951070 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p"] Oct 02 22:30:01 crc kubenswrapper[4636]: I1002 22:30:01.778783 4636 generic.go:334] "Generic (PLEG): container finished" podID="f2516773-15c8-4e3f-aee7-6267e1fe9e5a" containerID="75bc6a7dca4084ee48af4eb348300bf83585276f981ded0141aba40ad6cf6f6a" exitCode=0 Oct 02 22:30:01 crc kubenswrapper[4636]: I1002 22:30:01.778880 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" event={"ID":"f2516773-15c8-4e3f-aee7-6267e1fe9e5a","Type":"ContainerDied","Data":"75bc6a7dca4084ee48af4eb348300bf83585276f981ded0141aba40ad6cf6f6a"} Oct 02 22:30:01 crc kubenswrapper[4636]: I1002 22:30:01.779059 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" event={"ID":"f2516773-15c8-4e3f-aee7-6267e1fe9e5a","Type":"ContainerStarted","Data":"130ae85d0b6a4ca311e2e4fbfe4af890aa82cf5403290646df6b72d3ce5a1529"} Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.331516 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.407518 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-secret-volume\") pod \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.407582 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67j82\" (UniqueName: \"kubernetes.io/projected/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-kube-api-access-67j82\") pod \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.407644 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-config-volume\") pod \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\" (UID: \"f2516773-15c8-4e3f-aee7-6267e1fe9e5a\") " Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.408215 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-config-volume" (OuterVolumeSpecName: "config-volume") pod "f2516773-15c8-4e3f-aee7-6267e1fe9e5a" (UID: "f2516773-15c8-4e3f-aee7-6267e1fe9e5a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.409310 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.418324 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f2516773-15c8-4e3f-aee7-6267e1fe9e5a" (UID: "f2516773-15c8-4e3f-aee7-6267e1fe9e5a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.420098 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-kube-api-access-67j82" (OuterVolumeSpecName: "kube-api-access-67j82") pod "f2516773-15c8-4e3f-aee7-6267e1fe9e5a" (UID: "f2516773-15c8-4e3f-aee7-6267e1fe9e5a"). InnerVolumeSpecName "kube-api-access-67j82". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.511356 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.511611 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67j82\" (UniqueName: \"kubernetes.io/projected/f2516773-15c8-4e3f-aee7-6267e1fe9e5a-kube-api-access-67j82\") on node \"crc\" DevicePath \"\"" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.795040 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" event={"ID":"f2516773-15c8-4e3f-aee7-6267e1fe9e5a","Type":"ContainerDied","Data":"130ae85d0b6a4ca311e2e4fbfe4af890aa82cf5403290646df6b72d3ce5a1529"} Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.795444 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="130ae85d0b6a4ca311e2e4fbfe4af890aa82cf5403290646df6b72d3ce5a1529" Oct 02 22:30:03 crc kubenswrapper[4636]: I1002 22:30:03.795271 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324070-5nv2p" Oct 02 22:30:04 crc kubenswrapper[4636]: I1002 22:30:04.407366 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx"] Oct 02 22:30:04 crc kubenswrapper[4636]: I1002 22:30:04.415385 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324025-rthvx"] Oct 02 22:30:05 crc kubenswrapper[4636]: I1002 22:30:05.613827 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="115f6fcc-7689-4d2e-b995-09c5c884ae27" path="/var/lib/kubelet/pods/115f6fcc-7689-4d2e-b995-09c5c884ae27/volumes" Oct 02 22:30:06 crc kubenswrapper[4636]: I1002 22:30:06.742054 4636 scope.go:117] "RemoveContainer" containerID="af80d547d133989fcbc32a03b8eaeb17f07aa37b792cfa5565d9f2eddc50940c" Oct 02 22:30:23 crc kubenswrapper[4636]: I1002 22:30:23.117868 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:30:23 crc kubenswrapper[4636]: I1002 22:30:23.118598 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:30:53 crc kubenswrapper[4636]: I1002 22:30:53.117786 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:30:53 crc kubenswrapper[4636]: I1002 22:30:53.118384 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:31:23 crc kubenswrapper[4636]: I1002 22:31:23.117405 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:31:23 crc kubenswrapper[4636]: I1002 22:31:23.118083 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:31:23 crc kubenswrapper[4636]: I1002 22:31:23.118148 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:31:23 crc kubenswrapper[4636]: I1002 22:31:23.119113 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8ad0499b3003e641638fb66ed89dcc75dcd9f4dbbc09197f42075aef7e5eaf14"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:31:23 crc kubenswrapper[4636]: I1002 22:31:23.119239 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://8ad0499b3003e641638fb66ed89dcc75dcd9f4dbbc09197f42075aef7e5eaf14" gracePeriod=600 Oct 02 22:31:24 crc kubenswrapper[4636]: I1002 22:31:24.500785 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="8ad0499b3003e641638fb66ed89dcc75dcd9f4dbbc09197f42075aef7e5eaf14" exitCode=0 Oct 02 22:31:24 crc kubenswrapper[4636]: I1002 22:31:24.501304 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"8ad0499b3003e641638fb66ed89dcc75dcd9f4dbbc09197f42075aef7e5eaf14"} Oct 02 22:31:24 crc kubenswrapper[4636]: I1002 22:31:24.501364 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2"} Oct 02 22:31:24 crc kubenswrapper[4636]: I1002 22:31:24.501382 4636 scope.go:117] "RemoveContainer" containerID="1c905fd59a92fea5c140866acd1756e006b5dcaea2b34d24d3543c1d23200890" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.420092 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-knxrw"] Oct 02 22:33:17 crc kubenswrapper[4636]: E1002 22:33:17.421170 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2516773-15c8-4e3f-aee7-6267e1fe9e5a" containerName="collect-profiles" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.421183 4636 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f2516773-15c8-4e3f-aee7-6267e1fe9e5a" containerName="collect-profiles" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.421373 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2516773-15c8-4e3f-aee7-6267e1fe9e5a" containerName="collect-profiles" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.422705 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.434853 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-knxrw"] Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.520678 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8lct\" (UniqueName: \"kubernetes.io/projected/87cc61b6-c13e-46d7-a956-96702810dc28-kube-api-access-q8lct\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.520794 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-catalog-content\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.520898 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-utilities\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.622489 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-catalog-content\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.622535 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-utilities\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.622641 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8lct\" (UniqueName: \"kubernetes.io/projected/87cc61b6-c13e-46d7-a956-96702810dc28-kube-api-access-q8lct\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.623278 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-catalog-content\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.623298 4636 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-utilities\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.644876 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8lct\" (UniqueName: \"kubernetes.io/projected/87cc61b6-c13e-46d7-a956-96702810dc28-kube-api-access-q8lct\") pod \"redhat-operators-knxrw\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:17 crc kubenswrapper[4636]: I1002 22:33:17.754186 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:18 crc kubenswrapper[4636]: I1002 22:33:18.272260 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-knxrw"] Oct 02 22:33:18 crc kubenswrapper[4636]: I1002 22:33:18.488531 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerStarted","Data":"50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785"} Oct 02 22:33:18 crc kubenswrapper[4636]: I1002 22:33:18.488871 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerStarted","Data":"be828c158029765276984187d5c7803bdecd8ec6b5ec23c7e0e033f667c80a78"} Oct 02 22:33:19 crc kubenswrapper[4636]: I1002 22:33:19.498871 4636 generic.go:334] "Generic (PLEG): container finished" podID="87cc61b6-c13e-46d7-a956-96702810dc28" containerID="50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785" exitCode=0 Oct 02 22:33:19 crc kubenswrapper[4636]: I1002 22:33:19.498974 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerDied","Data":"50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785"} Oct 02 22:33:19 crc kubenswrapper[4636]: I1002 22:33:19.501851 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:33:21 crc kubenswrapper[4636]: I1002 22:33:21.518263 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerStarted","Data":"a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773"} Oct 02 22:33:23 crc kubenswrapper[4636]: I1002 22:33:23.117691 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:33:23 crc kubenswrapper[4636]: I1002 22:33:23.118001 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:33:25 crc kubenswrapper[4636]: I1002 22:33:25.560582 4636 generic.go:334] "Generic (PLEG): 
container finished" podID="87cc61b6-c13e-46d7-a956-96702810dc28" containerID="a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773" exitCode=0 Oct 02 22:33:25 crc kubenswrapper[4636]: I1002 22:33:25.560657 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerDied","Data":"a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773"} Oct 02 22:33:26 crc kubenswrapper[4636]: I1002 22:33:26.570900 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerStarted","Data":"73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5"} Oct 02 22:33:26 crc kubenswrapper[4636]: I1002 22:33:26.592335 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-knxrw" podStartSLOduration=3.077564035 podStartE2EDuration="9.592316482s" podCreationTimestamp="2025-10-02 22:33:17 +0000 UTC" firstStartedPulling="2025-10-02 22:33:19.501637874 +0000 UTC m=+4190.824645893" lastFinishedPulling="2025-10-02 22:33:26.016390321 +0000 UTC m=+4197.339398340" observedRunningTime="2025-10-02 22:33:26.58779078 +0000 UTC m=+4197.910798799" watchObservedRunningTime="2025-10-02 22:33:26.592316482 +0000 UTC m=+4197.915324501" Oct 02 22:33:27 crc kubenswrapper[4636]: I1002 22:33:27.755169 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:27 crc kubenswrapper[4636]: I1002 22:33:27.756652 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:28 crc kubenswrapper[4636]: I1002 22:33:28.805194 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-knxrw" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="registry-server" probeResult="failure" output=< Oct 02 22:33:28 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 22:33:28 crc kubenswrapper[4636]: > Oct 02 22:33:38 crc kubenswrapper[4636]: I1002 22:33:38.796853 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-knxrw" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="registry-server" probeResult="failure" output=< Oct 02 22:33:38 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 22:33:38 crc kubenswrapper[4636]: > Oct 02 22:33:47 crc kubenswrapper[4636]: I1002 22:33:47.817241 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:47 crc kubenswrapper[4636]: I1002 22:33:47.870736 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:48 crc kubenswrapper[4636]: I1002 22:33:48.619349 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-knxrw"] Oct 02 22:33:49 crc kubenswrapper[4636]: I1002 22:33:49.775893 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-knxrw" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="registry-server" containerID="cri-o://73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5" gracePeriod=2 Oct 02 
22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.314575 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.456665 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-utilities\") pod \"87cc61b6-c13e-46d7-a956-96702810dc28\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.456723 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8lct\" (UniqueName: \"kubernetes.io/projected/87cc61b6-c13e-46d7-a956-96702810dc28-kube-api-access-q8lct\") pod \"87cc61b6-c13e-46d7-a956-96702810dc28\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.456959 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-catalog-content\") pod \"87cc61b6-c13e-46d7-a956-96702810dc28\" (UID: \"87cc61b6-c13e-46d7-a956-96702810dc28\") " Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.458011 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-utilities" (OuterVolumeSpecName: "utilities") pod "87cc61b6-c13e-46d7-a956-96702810dc28" (UID: "87cc61b6-c13e-46d7-a956-96702810dc28"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.462306 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cc61b6-c13e-46d7-a956-96702810dc28-kube-api-access-q8lct" (OuterVolumeSpecName: "kube-api-access-q8lct") pod "87cc61b6-c13e-46d7-a956-96702810dc28" (UID: "87cc61b6-c13e-46d7-a956-96702810dc28"). InnerVolumeSpecName "kube-api-access-q8lct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.537415 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87cc61b6-c13e-46d7-a956-96702810dc28" (UID: "87cc61b6-c13e-46d7-a956-96702810dc28"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.559017 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.559046 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87cc61b6-c13e-46d7-a956-96702810dc28-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.559058 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8lct\" (UniqueName: \"kubernetes.io/projected/87cc61b6-c13e-46d7-a956-96702810dc28-kube-api-access-q8lct\") on node \"crc\" DevicePath \"\"" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.784951 4636 generic.go:334] "Generic (PLEG): container finished" podID="87cc61b6-c13e-46d7-a956-96702810dc28" containerID="73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5" exitCode=0 Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.785010 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-knxrw" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.785012 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerDied","Data":"73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5"} Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.786183 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knxrw" event={"ID":"87cc61b6-c13e-46d7-a956-96702810dc28","Type":"ContainerDied","Data":"be828c158029765276984187d5c7803bdecd8ec6b5ec23c7e0e033f667c80a78"} Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.786226 4636 scope.go:117] "RemoveContainer" containerID="73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.809468 4636 scope.go:117] "RemoveContainer" containerID="a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.822727 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-knxrw"] Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.830396 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-knxrw"] Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.847207 4636 scope.go:117] "RemoveContainer" containerID="50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.893888 4636 scope.go:117] "RemoveContainer" containerID="73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5" Oct 02 22:33:50 crc kubenswrapper[4636]: E1002 22:33:50.894505 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5\": container with ID starting with 73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5 not found: ID does not exist" containerID="73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.894609 4636 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5"} err="failed to get container status \"73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5\": rpc error: code = NotFound desc = could not find container \"73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5\": container with ID starting with 73a91f91339ad082acf903317bb57f6d110250d8e4d2b29dcbf85a0f3376ffd5 not found: ID does not exist" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.894725 4636 scope.go:117] "RemoveContainer" containerID="a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773" Oct 02 22:33:50 crc kubenswrapper[4636]: E1002 22:33:50.895218 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773\": container with ID starting with a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773 not found: ID does not exist" containerID="a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.895255 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773"} err="failed to get container status \"a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773\": rpc error: code = NotFound desc = could not find container \"a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773\": container with ID starting with a41cfbab40ce9922ef93fe89814624e57a2f37da45892549c7e99556181fd773 not found: ID does not exist" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.895281 4636 scope.go:117] "RemoveContainer" containerID="50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785" Oct 02 22:33:50 crc kubenswrapper[4636]: E1002 22:33:50.895536 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785\": container with ID starting with 50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785 not found: ID does not exist" containerID="50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785" Oct 02 22:33:50 crc kubenswrapper[4636]: I1002 22:33:50.895565 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785"} err="failed to get container status \"50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785\": rpc error: code = NotFound desc = could not find container \"50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785\": container with ID starting with 50a8d357fa0f5dd566b8d180caac532b9b9dd865e24582ea6f69dd9fcee8e785 not found: ID does not exist" Oct 02 22:33:51 crc kubenswrapper[4636]: I1002 22:33:51.616777 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" path="/var/lib/kubelet/pods/87cc61b6-c13e-46d7-a956-96702810dc28/volumes" Oct 02 22:33:53 crc kubenswrapper[4636]: I1002 22:33:53.117569 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:33:53 crc kubenswrapper[4636]: I1002 22:33:53.118912 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:34:23 crc kubenswrapper[4636]: I1002 22:34:23.117839 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:34:23 crc kubenswrapper[4636]: I1002 22:34:23.118416 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:34:23 crc kubenswrapper[4636]: I1002 22:34:23.118469 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:34:23 crc kubenswrapper[4636]: I1002 22:34:23.119607 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:34:23 crc kubenswrapper[4636]: I1002 22:34:23.119678 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" gracePeriod=600 Oct 02 22:34:23 crc kubenswrapper[4636]: E1002 22:34:23.254332 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:34:24 crc kubenswrapper[4636]: I1002 22:34:24.071504 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" exitCode=0 Oct 02 22:34:24 crc kubenswrapper[4636]: I1002 22:34:24.071549 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2"} Oct 02 22:34:24 crc kubenswrapper[4636]: I1002 22:34:24.071587 4636 scope.go:117] "RemoveContainer" containerID="8ad0499b3003e641638fb66ed89dcc75dcd9f4dbbc09197f42075aef7e5eaf14" Oct 02 22:34:24 crc kubenswrapper[4636]: I1002 
22:34:24.072453 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:34:24 crc kubenswrapper[4636]: E1002 22:34:24.072957 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:34:38 crc kubenswrapper[4636]: I1002 22:34:38.604622 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:34:38 crc kubenswrapper[4636]: E1002 22:34:38.605226 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:34:49 crc kubenswrapper[4636]: I1002 22:34:49.610123 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:34:49 crc kubenswrapper[4636]: E1002 22:34:49.610824 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:35:02 crc kubenswrapper[4636]: I1002 22:35:02.603844 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:35:02 crc kubenswrapper[4636]: E1002 22:35:02.604701 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:35:16 crc kubenswrapper[4636]: I1002 22:35:16.604719 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:35:16 crc kubenswrapper[4636]: E1002 22:35:16.605686 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:35:29 crc kubenswrapper[4636]: I1002 22:35:29.618985 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:35:29 crc kubenswrapper[4636]: E1002 22:35:29.619807 
4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:35:40 crc kubenswrapper[4636]: I1002 22:35:40.604193 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:35:40 crc kubenswrapper[4636]: E1002 22:35:40.605139 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.483713 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fvdbl"] Oct 02 22:35:50 crc kubenswrapper[4636]: E1002 22:35:50.484930 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="extract-content" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.484949 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="extract-content" Oct 02 22:35:50 crc kubenswrapper[4636]: E1002 22:35:50.485002 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="registry-server" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.485010 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="registry-server" Oct 02 22:35:50 crc kubenswrapper[4636]: E1002 22:35:50.485025 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="extract-utilities" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.485033 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="extract-utilities" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.485275 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="87cc61b6-c13e-46d7-a956-96702810dc28" containerName="registry-server" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.487190 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.506124 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvdbl"] Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.660041 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wlrm\" (UniqueName: \"kubernetes.io/projected/4d3b8a2c-af4b-4373-8441-38e6f624cac2-kube-api-access-4wlrm\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.660812 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-catalog-content\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.660938 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-utilities\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.762348 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-catalog-content\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.762694 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-utilities\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.762811 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-catalog-content\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.763017 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wlrm\" (UniqueName: \"kubernetes.io/projected/4d3b8a2c-af4b-4373-8441-38e6f624cac2-kube-api-access-4wlrm\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.763038 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-utilities\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.789729 4636 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4wlrm\" (UniqueName: \"kubernetes.io/projected/4d3b8a2c-af4b-4373-8441-38e6f624cac2-kube-api-access-4wlrm\") pod \"certified-operators-fvdbl\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:50 crc kubenswrapper[4636]: I1002 22:35:50.810086 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:35:51 crc kubenswrapper[4636]: I1002 22:35:51.364414 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvdbl"] Oct 02 22:35:51 crc kubenswrapper[4636]: I1002 22:35:51.950263 4636 generic.go:334] "Generic (PLEG): container finished" podID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerID="3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5" exitCode=0 Oct 02 22:35:51 crc kubenswrapper[4636]: I1002 22:35:51.950636 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerDied","Data":"3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5"} Oct 02 22:35:51 crc kubenswrapper[4636]: I1002 22:35:51.950666 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerStarted","Data":"d9f677f3f3726851bf1ef932eedd1ebd3b9b3fde423a6e226abe6f2f7539b057"} Oct 02 22:35:52 crc kubenswrapper[4636]: I1002 22:35:52.603922 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:35:52 crc kubenswrapper[4636]: E1002 22:35:52.604287 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:35:53 crc kubenswrapper[4636]: I1002 22:35:53.972526 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerStarted","Data":"3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b"} Oct 02 22:35:54 crc kubenswrapper[4636]: I1002 22:35:54.986249 4636 generic.go:334] "Generic (PLEG): container finished" podID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerID="3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b" exitCode=0 Oct 02 22:35:54 crc kubenswrapper[4636]: I1002 22:35:54.986471 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerDied","Data":"3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b"} Oct 02 22:35:56 crc kubenswrapper[4636]: I1002 22:35:56.001401 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerStarted","Data":"ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838"} Oct 02 22:35:56 crc kubenswrapper[4636]: I1002 22:35:56.030588 4636 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fvdbl" podStartSLOduration=2.606381989 podStartE2EDuration="6.030567112s" podCreationTimestamp="2025-10-02 22:35:50 +0000 UTC" firstStartedPulling="2025-10-02 22:35:51.952302444 +0000 UTC m=+4343.275310463" lastFinishedPulling="2025-10-02 22:35:55.376487567 +0000 UTC m=+4346.699495586" observedRunningTime="2025-10-02 22:35:56.022250169 +0000 UTC m=+4347.345258178" watchObservedRunningTime="2025-10-02 22:35:56.030567112 +0000 UTC m=+4347.353575141" Oct 02 22:36:00 crc kubenswrapper[4636]: I1002 22:36:00.810584 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:36:00 crc kubenswrapper[4636]: I1002 22:36:00.811100 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:36:00 crc kubenswrapper[4636]: I1002 22:36:00.866141 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:36:01 crc kubenswrapper[4636]: I1002 22:36:01.094532 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:36:01 crc kubenswrapper[4636]: I1002 22:36:01.155985 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fvdbl"] Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.062709 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fvdbl" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="registry-server" containerID="cri-o://ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838" gracePeriod=2 Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.579271 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.643264 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-catalog-content\") pod \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.643433 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-utilities\") pod \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.643621 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wlrm\" (UniqueName: \"kubernetes.io/projected/4d3b8a2c-af4b-4373-8441-38e6f624cac2-kube-api-access-4wlrm\") pod \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\" (UID: \"4d3b8a2c-af4b-4373-8441-38e6f624cac2\") " Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.644419 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-utilities" (OuterVolumeSpecName: "utilities") pod "4d3b8a2c-af4b-4373-8441-38e6f624cac2" (UID: "4d3b8a2c-af4b-4373-8441-38e6f624cac2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.656031 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d3b8a2c-af4b-4373-8441-38e6f624cac2-kube-api-access-4wlrm" (OuterVolumeSpecName: "kube-api-access-4wlrm") pod "4d3b8a2c-af4b-4373-8441-38e6f624cac2" (UID: "4d3b8a2c-af4b-4373-8441-38e6f624cac2"). InnerVolumeSpecName "kube-api-access-4wlrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.745370 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wlrm\" (UniqueName: \"kubernetes.io/projected/4d3b8a2c-af4b-4373-8441-38e6f624cac2-kube-api-access-4wlrm\") on node \"crc\" DevicePath \"\"" Oct 02 22:36:03 crc kubenswrapper[4636]: I1002 22:36:03.745410 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.073143 4636 generic.go:334] "Generic (PLEG): container finished" podID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerID="ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838" exitCode=0 Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.073202 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerDied","Data":"ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838"} Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.073221 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvdbl" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.073240 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvdbl" event={"ID":"4d3b8a2c-af4b-4373-8441-38e6f624cac2","Type":"ContainerDied","Data":"d9f677f3f3726851bf1ef932eedd1ebd3b9b3fde423a6e226abe6f2f7539b057"} Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.073268 4636 scope.go:117] "RemoveContainer" containerID="ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.097591 4636 scope.go:117] "RemoveContainer" containerID="3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.111276 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d3b8a2c-af4b-4373-8441-38e6f624cac2" (UID: "4d3b8a2c-af4b-4373-8441-38e6f624cac2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.121785 4636 scope.go:117] "RemoveContainer" containerID="3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.152271 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3b8a2c-af4b-4373-8441-38e6f624cac2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.168631 4636 scope.go:117] "RemoveContainer" containerID="ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838" Oct 02 22:36:04 crc kubenswrapper[4636]: E1002 22:36:04.168997 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838\": container with ID starting with ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838 not found: ID does not exist" containerID="ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.169034 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838"} err="failed to get container status \"ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838\": rpc error: code = NotFound desc = could not find container \"ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838\": container with ID starting with ec099024994e79016056e58eafe6af81854803dee4ed24a60f722d47a0ee3838 not found: ID does not exist" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.169057 4636 scope.go:117] "RemoveContainer" containerID="3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b" Oct 02 22:36:04 crc kubenswrapper[4636]: E1002 22:36:04.169316 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b\": container with ID starting with 3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b not found: ID does not exist" containerID="3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.169344 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b"} err="failed to get container status \"3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b\": rpc error: code = NotFound desc = could not find container \"3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b\": container with ID starting with 3ce43bdc74eda32c7168b2e7069418ef49f54c689321a956b1ef6eb95cd00c7b not found: ID does not exist" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.169361 4636 scope.go:117] "RemoveContainer" containerID="3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5" Oct 02 22:36:04 crc kubenswrapper[4636]: E1002 22:36:04.169626 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5\": container with ID starting with 3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5 not found: ID does not exist" 
containerID="3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.169672 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5"} err="failed to get container status \"3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5\": rpc error: code = NotFound desc = could not find container \"3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5\": container with ID starting with 3fc0d4a0ea594036970905328089567a2542e341faa30735553988ee951739c5 not found: ID does not exist" Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.438180 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fvdbl"] Oct 02 22:36:04 crc kubenswrapper[4636]: I1002 22:36:04.452392 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fvdbl"] Oct 02 22:36:05 crc kubenswrapper[4636]: I1002 22:36:05.621026 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" path="/var/lib/kubelet/pods/4d3b8a2c-af4b-4373-8441-38e6f624cac2/volumes" Oct 02 22:36:06 crc kubenswrapper[4636]: I1002 22:36:06.603385 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:36:06 crc kubenswrapper[4636]: E1002 22:36:06.603789 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.337291 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-csqvt"] Oct 02 22:36:12 crc kubenswrapper[4636]: E1002 22:36:12.338338 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="registry-server" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.338354 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="registry-server" Oct 02 22:36:12 crc kubenswrapper[4636]: E1002 22:36:12.338384 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="extract-content" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.338391 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="extract-content" Oct 02 22:36:12 crc kubenswrapper[4636]: E1002 22:36:12.338423 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="extract-utilities" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.338431 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="extract-utilities" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.338641 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d3b8a2c-af4b-4373-8441-38e6f624cac2" containerName="registry-server" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 
22:36:12.340385 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.358447 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-csqvt"] Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.506929 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4cjg\" (UniqueName: \"kubernetes.io/projected/6301da03-b784-4544-a9ff-7b268b5b30db-kube-api-access-v4cjg\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.507026 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-catalog-content\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.507207 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-utilities\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.608368 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-catalog-content\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.608522 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-utilities\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.608582 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4cjg\" (UniqueName: \"kubernetes.io/projected/6301da03-b784-4544-a9ff-7b268b5b30db-kube-api-access-v4cjg\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.608965 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-utilities\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.608983 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-catalog-content\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc 
kubenswrapper[4636]: I1002 22:36:12.626582 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4cjg\" (UniqueName: \"kubernetes.io/projected/6301da03-b784-4544-a9ff-7b268b5b30db-kube-api-access-v4cjg\") pod \"community-operators-csqvt\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:12 crc kubenswrapper[4636]: I1002 22:36:12.656596 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:13 crc kubenswrapper[4636]: I1002 22:36:13.185293 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-csqvt"] Oct 02 22:36:14 crc kubenswrapper[4636]: I1002 22:36:14.166989 4636 generic.go:334] "Generic (PLEG): container finished" podID="6301da03-b784-4544-a9ff-7b268b5b30db" containerID="3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e" exitCode=0 Oct 02 22:36:14 crc kubenswrapper[4636]: I1002 22:36:14.167043 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerDied","Data":"3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e"} Oct 02 22:36:14 crc kubenswrapper[4636]: I1002 22:36:14.167266 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerStarted","Data":"3507c86e80ae0b579fca6483bf88342651feb575ccda81348058ae7fcbcbada5"} Oct 02 22:36:15 crc kubenswrapper[4636]: I1002 22:36:15.176987 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerStarted","Data":"0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e"} Oct 02 22:36:17 crc kubenswrapper[4636]: I1002 22:36:17.195261 4636 generic.go:334] "Generic (PLEG): container finished" podID="6301da03-b784-4544-a9ff-7b268b5b30db" containerID="0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e" exitCode=0 Oct 02 22:36:17 crc kubenswrapper[4636]: I1002 22:36:17.195348 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerDied","Data":"0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e"} Oct 02 22:36:18 crc kubenswrapper[4636]: I1002 22:36:18.207602 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerStarted","Data":"e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984"} Oct 02 22:36:18 crc kubenswrapper[4636]: I1002 22:36:18.240950 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-csqvt" podStartSLOduration=2.721329322 podStartE2EDuration="6.240934672s" podCreationTimestamp="2025-10-02 22:36:12 +0000 UTC" firstStartedPulling="2025-10-02 22:36:14.169262731 +0000 UTC m=+4365.492270750" lastFinishedPulling="2025-10-02 22:36:17.688868081 +0000 UTC m=+4369.011876100" observedRunningTime="2025-10-02 22:36:18.236210195 +0000 UTC m=+4369.559218214" watchObservedRunningTime="2025-10-02 22:36:18.240934672 +0000 UTC m=+4369.563942691" Oct 02 22:36:19 crc kubenswrapper[4636]: I1002 
22:36:19.610729 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:36:19 crc kubenswrapper[4636]: E1002 22:36:19.611409 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:36:22 crc kubenswrapper[4636]: I1002 22:36:22.657698 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:22 crc kubenswrapper[4636]: I1002 22:36:22.658237 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:22 crc kubenswrapper[4636]: I1002 22:36:22.705594 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:23 crc kubenswrapper[4636]: I1002 22:36:23.302619 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:23 crc kubenswrapper[4636]: I1002 22:36:23.348648 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-csqvt"] Oct 02 22:36:25 crc kubenswrapper[4636]: I1002 22:36:25.273928 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-csqvt" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="registry-server" containerID="cri-o://e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984" gracePeriod=2 Oct 02 22:36:25 crc kubenswrapper[4636]: I1002 22:36:25.856319 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.002294 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4cjg\" (UniqueName: \"kubernetes.io/projected/6301da03-b784-4544-a9ff-7b268b5b30db-kube-api-access-v4cjg\") pod \"6301da03-b784-4544-a9ff-7b268b5b30db\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.002388 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-utilities\") pod \"6301da03-b784-4544-a9ff-7b268b5b30db\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.002455 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-catalog-content\") pod \"6301da03-b784-4544-a9ff-7b268b5b30db\" (UID: \"6301da03-b784-4544-a9ff-7b268b5b30db\") " Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.003387 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-utilities" (OuterVolumeSpecName: "utilities") pod "6301da03-b784-4544-a9ff-7b268b5b30db" (UID: "6301da03-b784-4544-a9ff-7b268b5b30db"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.010108 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6301da03-b784-4544-a9ff-7b268b5b30db-kube-api-access-v4cjg" (OuterVolumeSpecName: "kube-api-access-v4cjg") pod "6301da03-b784-4544-a9ff-7b268b5b30db" (UID: "6301da03-b784-4544-a9ff-7b268b5b30db"). InnerVolumeSpecName "kube-api-access-v4cjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.105029 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4cjg\" (UniqueName: \"kubernetes.io/projected/6301da03-b784-4544-a9ff-7b268b5b30db-kube-api-access-v4cjg\") on node \"crc\" DevicePath \"\"" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.105280 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.285827 4636 generic.go:334] "Generic (PLEG): container finished" podID="6301da03-b784-4544-a9ff-7b268b5b30db" containerID="e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984" exitCode=0 Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.286822 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerDied","Data":"e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984"} Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.286868 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-csqvt" event={"ID":"6301da03-b784-4544-a9ff-7b268b5b30db","Type":"ContainerDied","Data":"3507c86e80ae0b579fca6483bf88342651feb575ccda81348058ae7fcbcbada5"} Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.286889 4636 scope.go:117] "RemoveContainer" containerID="e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.287034 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-csqvt" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.312527 4636 scope.go:117] "RemoveContainer" containerID="0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.338859 4636 scope.go:117] "RemoveContainer" containerID="3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.387247 4636 scope.go:117] "RemoveContainer" containerID="e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984" Oct 02 22:36:26 crc kubenswrapper[4636]: E1002 22:36:26.388275 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984\": container with ID starting with e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984 not found: ID does not exist" containerID="e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.388415 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984"} err="failed to get container status \"e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984\": rpc error: code = NotFound desc = could not find container \"e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984\": container with ID starting with e1daf67fa2eecf6cf64dcb5f18a8ad3e679a5efa62befaf1dcba095bbdcbd984 not found: ID does not exist" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.388536 4636 scope.go:117] "RemoveContainer" containerID="0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e" Oct 02 22:36:26 crc kubenswrapper[4636]: E1002 22:36:26.389092 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e\": container with ID starting with 0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e not found: ID does not exist" containerID="0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.389126 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e"} err="failed to get container status \"0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e\": rpc error: code = NotFound desc = could not find container \"0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e\": container with ID starting with 0f6b89c2933d5493ed4d2d8acca8535e265fa3d3961eab4691bb101dd28aca1e not found: ID does not exist" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.389151 4636 scope.go:117] "RemoveContainer" containerID="3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e" Oct 02 22:36:26 crc kubenswrapper[4636]: E1002 22:36:26.389463 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e\": container with ID starting with 3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e not found: ID does not exist" containerID="3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e" 
Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.389572 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e"} err="failed to get container status \"3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e\": rpc error: code = NotFound desc = could not find container \"3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e\": container with ID starting with 3b3923c52bdedbe5afdc73f333c8377ad5f9d8493f9f184165798c3d817e109e not found: ID does not exist" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.845669 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6301da03-b784-4544-a9ff-7b268b5b30db" (UID: "6301da03-b784-4544-a9ff-7b268b5b30db"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.922143 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6301da03-b784-4544-a9ff-7b268b5b30db-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.926676 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-csqvt"] Oct 02 22:36:26 crc kubenswrapper[4636]: I1002 22:36:26.935688 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-csqvt"] Oct 02 22:36:27 crc kubenswrapper[4636]: I1002 22:36:27.614924 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" path="/var/lib/kubelet/pods/6301da03-b784-4544-a9ff-7b268b5b30db/volumes" Oct 02 22:36:32 crc kubenswrapper[4636]: I1002 22:36:32.603783 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:36:32 crc kubenswrapper[4636]: E1002 22:36:32.604555 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:36:44 crc kubenswrapper[4636]: I1002 22:36:44.603927 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:36:44 crc kubenswrapper[4636]: E1002 22:36:44.604731 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:36:55 crc kubenswrapper[4636]: I1002 22:36:55.604067 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:36:55 crc kubenswrapper[4636]: E1002 22:36:55.604869 4636 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.195180 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jl8sw"] Oct 02 22:36:57 crc kubenswrapper[4636]: E1002 22:36:57.195880 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="extract-utilities" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.195893 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="extract-utilities" Oct 02 22:36:57 crc kubenswrapper[4636]: E1002 22:36:57.195915 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="extract-content" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.195921 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="extract-content" Oct 02 22:36:57 crc kubenswrapper[4636]: E1002 22:36:57.195932 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="registry-server" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.195963 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="registry-server" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.196187 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="6301da03-b784-4544-a9ff-7b268b5b30db" containerName="registry-server" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.197614 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.205843 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jl8sw"] Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.318199 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-catalog-content\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.318250 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-utilities\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.318298 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjnxr\" (UniqueName: \"kubernetes.io/projected/5fd07e88-eb0f-4f93-8828-24856c3563a2-kube-api-access-qjnxr\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.419859 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-catalog-content\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.420093 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-utilities\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.420225 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjnxr\" (UniqueName: \"kubernetes.io/projected/5fd07e88-eb0f-4f93-8828-24856c3563a2-kube-api-access-qjnxr\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.420347 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-catalog-content\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.420727 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-utilities\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.438662 4636 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qjnxr\" (UniqueName: \"kubernetes.io/projected/5fd07e88-eb0f-4f93-8828-24856c3563a2-kube-api-access-qjnxr\") pod \"redhat-marketplace-jl8sw\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.527588 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:36:57 crc kubenswrapper[4636]: I1002 22:36:57.995780 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jl8sw"] Oct 02 22:36:58 crc kubenswrapper[4636]: I1002 22:36:58.556943 4636 generic.go:334] "Generic (PLEG): container finished" podID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerID="671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6" exitCode=0 Oct 02 22:36:58 crc kubenswrapper[4636]: I1002 22:36:58.557005 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerDied","Data":"671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6"} Oct 02 22:36:58 crc kubenswrapper[4636]: I1002 22:36:58.557309 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerStarted","Data":"7779dfeedc721e0d8511d6223da11181429d0b63b52e09612db1b36877f061c9"} Oct 02 22:36:59 crc kubenswrapper[4636]: I1002 22:36:59.567509 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerStarted","Data":"ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d"} Oct 02 22:37:00 crc kubenswrapper[4636]: I1002 22:37:00.577922 4636 generic.go:334] "Generic (PLEG): container finished" podID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerID="ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d" exitCode=0 Oct 02 22:37:00 crc kubenswrapper[4636]: I1002 22:37:00.577975 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerDied","Data":"ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d"} Oct 02 22:37:01 crc kubenswrapper[4636]: I1002 22:37:01.588245 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerStarted","Data":"ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8"} Oct 02 22:37:07 crc kubenswrapper[4636]: I1002 22:37:07.527877 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:37:07 crc kubenswrapper[4636]: I1002 22:37:07.529028 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:37:07 crc kubenswrapper[4636]: I1002 22:37:07.590628 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:37:07 crc kubenswrapper[4636]: I1002 22:37:07.612649 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jl8sw" 
podStartSLOduration=8.219850576 podStartE2EDuration="10.612628725s" podCreationTimestamp="2025-10-02 22:36:57 +0000 UTC" firstStartedPulling="2025-10-02 22:36:58.559991312 +0000 UTC m=+4409.882999331" lastFinishedPulling="2025-10-02 22:37:00.952769461 +0000 UTC m=+4412.275777480" observedRunningTime="2025-10-02 22:37:01.610123433 +0000 UTC m=+4412.933131442" watchObservedRunningTime="2025-10-02 22:37:07.612628725 +0000 UTC m=+4418.935636744" Oct 02 22:37:07 crc kubenswrapper[4636]: I1002 22:37:07.691429 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:37:07 crc kubenswrapper[4636]: I1002 22:37:07.837318 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jl8sw"] Oct 02 22:37:09 crc kubenswrapper[4636]: I1002 22:37:09.610006 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:37:09 crc kubenswrapper[4636]: E1002 22:37:09.610326 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:37:09 crc kubenswrapper[4636]: I1002 22:37:09.663494 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jl8sw" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="registry-server" containerID="cri-o://ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8" gracePeriod=2 Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.206719 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.376429 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-utilities\") pod \"5fd07e88-eb0f-4f93-8828-24856c3563a2\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.376629 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjnxr\" (UniqueName: \"kubernetes.io/projected/5fd07e88-eb0f-4f93-8828-24856c3563a2-kube-api-access-qjnxr\") pod \"5fd07e88-eb0f-4f93-8828-24856c3563a2\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.376686 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-catalog-content\") pod \"5fd07e88-eb0f-4f93-8828-24856c3563a2\" (UID: \"5fd07e88-eb0f-4f93-8828-24856c3563a2\") " Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.377411 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-utilities" (OuterVolumeSpecName: "utilities") pod "5fd07e88-eb0f-4f93-8828-24856c3563a2" (UID: "5fd07e88-eb0f-4f93-8828-24856c3563a2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.392258 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fd07e88-eb0f-4f93-8828-24856c3563a2-kube-api-access-qjnxr" (OuterVolumeSpecName: "kube-api-access-qjnxr") pod "5fd07e88-eb0f-4f93-8828-24856c3563a2" (UID: "5fd07e88-eb0f-4f93-8828-24856c3563a2"). InnerVolumeSpecName "kube-api-access-qjnxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.394921 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5fd07e88-eb0f-4f93-8828-24856c3563a2" (UID: "5fd07e88-eb0f-4f93-8828-24856c3563a2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.395833 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.395856 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjnxr\" (UniqueName: \"kubernetes.io/projected/5fd07e88-eb0f-4f93-8828-24856c3563a2-kube-api-access-qjnxr\") on node \"crc\" DevicePath \"\"" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.395865 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fd07e88-eb0f-4f93-8828-24856c3563a2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.674504 4636 generic.go:334] "Generic (PLEG): container finished" podID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerID="ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8" exitCode=0 Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.674545 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerDied","Data":"ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8"} Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.674570 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jl8sw" event={"ID":"5fd07e88-eb0f-4f93-8828-24856c3563a2","Type":"ContainerDied","Data":"7779dfeedc721e0d8511d6223da11181429d0b63b52e09612db1b36877f061c9"} Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.674586 4636 scope.go:117] "RemoveContainer" containerID="ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.674733 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jl8sw" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.694667 4636 scope.go:117] "RemoveContainer" containerID="ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.710888 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jl8sw"] Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.719858 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jl8sw"] Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.721350 4636 scope.go:117] "RemoveContainer" containerID="671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.783320 4636 scope.go:117] "RemoveContainer" containerID="ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8" Oct 02 22:37:10 crc kubenswrapper[4636]: E1002 22:37:10.783715 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8\": container with ID starting with ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8 not found: ID does not exist" containerID="ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.783741 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8"} err="failed to get container status \"ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8\": rpc error: code = NotFound desc = could not find container \"ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8\": container with ID starting with ced137be338710a72c3ec19bb7c18234f80fc1ec1cbf65c43ba37a39964b89f8 not found: ID does not exist" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.783850 4636 scope.go:117] "RemoveContainer" containerID="ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d" Oct 02 22:37:10 crc kubenswrapper[4636]: E1002 22:37:10.784071 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d\": container with ID starting with ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d not found: ID does not exist" containerID="ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.784096 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d"} err="failed to get container status \"ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d\": rpc error: code = NotFound desc = could not find container \"ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d\": container with ID starting with ba7e556a721338e5c0d51055a31803acad4b847e08e8d529ad9677e22bb1416d not found: ID does not exist" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.784109 4636 scope.go:117] "RemoveContainer" containerID="671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6" Oct 02 22:37:10 crc kubenswrapper[4636]: E1002 22:37:10.784361 4636 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6\": container with ID starting with 671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6 not found: ID does not exist" containerID="671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6" Oct 02 22:37:10 crc kubenswrapper[4636]: I1002 22:37:10.784390 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6"} err="failed to get container status \"671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6\": rpc error: code = NotFound desc = could not find container \"671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6\": container with ID starting with 671b5335b43b182fd26874f01cbab4a727bb452b2ac96444feeb5acf52af02d6 not found: ID does not exist" Oct 02 22:37:11 crc kubenswrapper[4636]: I1002 22:37:11.612734 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" path="/var/lib/kubelet/pods/5fd07e88-eb0f-4f93-8828-24856c3563a2/volumes" Oct 02 22:37:21 crc kubenswrapper[4636]: I1002 22:37:21.603547 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:37:21 crc kubenswrapper[4636]: E1002 22:37:21.604351 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:37:32 crc kubenswrapper[4636]: I1002 22:37:32.603805 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:37:32 crc kubenswrapper[4636]: E1002 22:37:32.604695 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:37:47 crc kubenswrapper[4636]: I1002 22:37:47.604086 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:37:47 crc kubenswrapper[4636]: E1002 22:37:47.605163 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:38:00 crc kubenswrapper[4636]: I1002 22:38:00.604237 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:38:00 crc kubenswrapper[4636]: E1002 22:38:00.606338 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:38:11 crc kubenswrapper[4636]: I1002 22:38:11.604080 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:38:11 crc kubenswrapper[4636]: E1002 22:38:11.604943 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:38:23 crc kubenswrapper[4636]: I1002 22:38:23.604575 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:38:23 crc kubenswrapper[4636]: E1002 22:38:23.606509 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:38:38 crc kubenswrapper[4636]: I1002 22:38:38.604586 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:38:38 crc kubenswrapper[4636]: E1002 22:38:38.605160 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:38:51 crc kubenswrapper[4636]: I1002 22:38:51.603460 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:38:51 crc kubenswrapper[4636]: E1002 22:38:51.604282 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:39:04 crc kubenswrapper[4636]: I1002 22:39:04.604418 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:39:04 crc kubenswrapper[4636]: E1002 22:39:04.605178 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:39:18 crc kubenswrapper[4636]: I1002 22:39:18.604826 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:39:18 crc kubenswrapper[4636]: E1002 22:39:18.606814 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:39:30 crc kubenswrapper[4636]: I1002 22:39:30.605322 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:39:30 crc kubenswrapper[4636]: I1002 22:39:30.990920 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"d07658d14c348f924c225743c75fcd5b94b9d94c43641ed3fd4c0cbf7a1d8bef"} Oct 02 22:41:53 crc kubenswrapper[4636]: I1002 22:41:53.117944 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:41:53 crc kubenswrapper[4636]: I1002 22:41:53.118462 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:42:23 crc kubenswrapper[4636]: I1002 22:42:23.117651 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:42:23 crc kubenswrapper[4636]: I1002 22:42:23.118424 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.117524 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.118121 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.118172 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.118930 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d07658d14c348f924c225743c75fcd5b94b9d94c43641ed3fd4c0cbf7a1d8bef"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.119004 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://d07658d14c348f924c225743c75fcd5b94b9d94c43641ed3fd4c0cbf7a1d8bef" gracePeriod=600 Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.852294 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="d07658d14c348f924c225743c75fcd5b94b9d94c43641ed3fd4c0cbf7a1d8bef" exitCode=0 Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.852378 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"d07658d14c348f924c225743c75fcd5b94b9d94c43641ed3fd4c0cbf7a1d8bef"} Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.853134 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"} Oct 02 22:42:53 crc kubenswrapper[4636]: I1002 22:42:53.853163 4636 scope.go:117] "RemoveContainer" containerID="a71c2d035bf8893b8f1d2cb2f85e080a7721c0259154af70ca3fec119280cbe2" Oct 02 22:44:12 crc kubenswrapper[4636]: I1002 22:44:12.592934 4636 generic.go:334] "Generic (PLEG): container finished" podID="388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" containerID="ac56fb672c2604ea33a7b9b2768555e0e046aeaf677cfb13f5fe159415413629" exitCode=1 Oct 02 22:44:12 crc kubenswrapper[4636]: I1002 22:44:12.593007 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab","Type":"ContainerDied","Data":"ac56fb672c2604ea33a7b9b2768555e0e046aeaf677cfb13f5fe159415413629"} Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.050024 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105528 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105727 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-temporary\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105785 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config-secret\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105825 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dtxc\" (UniqueName: \"kubernetes.io/projected/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-kube-api-access-8dtxc\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105850 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ssh-key\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105864 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-config-data\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105897 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-workdir\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105944 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.105978 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ca-certs\") pod \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\" (UID: \"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab\") " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.106374 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.106950 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-config-data" (OuterVolumeSpecName: "config-data") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.117912 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.118927 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-kube-api-access-8dtxc" (OuterVolumeSpecName: "kube-api-access-8dtxc") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "kube-api-access-8dtxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.122580 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.145720 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.162929 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.168667 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.168935 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" (UID: "388bd5ff-d88f-4ea6-83e0-0ae99fc188ab"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213063 4636 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213094 4636 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213104 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213114 4636 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213126 4636 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213138 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dtxc\" (UniqueName: \"kubernetes.io/projected/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-kube-api-access-8dtxc\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213146 4636 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213155 4636 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.213167 4636 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/388bd5ff-d88f-4ea6-83e0-0ae99fc188ab-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.232932 4636 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.314958 4636 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.613467 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/tempest-tests-tempest" event={"ID":"388bd5ff-d88f-4ea6-83e0-0ae99fc188ab","Type":"ContainerDied","Data":"79e227e50ea8f0855a8ab93712bb83d830e94c31176fb510fe2927af86bd12f7"} Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.613742 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79e227e50ea8f0855a8ab93712bb83d830e94c31176fb510fe2927af86bd12f7" Oct 02 22:44:14 crc kubenswrapper[4636]: I1002 22:44:14.613868 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.670465 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mpnqg"] Oct 02 22:44:17 crc kubenswrapper[4636]: E1002 22:44:17.671526 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="extract-content" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.671542 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="extract-content" Oct 02 22:44:17 crc kubenswrapper[4636]: E1002 22:44:17.671559 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="registry-server" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.671568 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="registry-server" Oct 02 22:44:17 crc kubenswrapper[4636]: E1002 22:44:17.671591 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" containerName="tempest-tests-tempest-tests-runner" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.671602 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" containerName="tempest-tests-tempest-tests-runner" Oct 02 22:44:17 crc kubenswrapper[4636]: E1002 22:44:17.671632 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="extract-utilities" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.671644 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="extract-utilities" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.671923 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fd07e88-eb0f-4f93-8828-24856c3563a2" containerName="registry-server" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.671949 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="388bd5ff-d88f-4ea6-83e0-0ae99fc188ab" containerName="tempest-tests-tempest-tests-runner" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.673639 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.686967 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mpnqg"] Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.786408 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-utilities\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.786528 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-catalog-content\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.786560 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scktm\" (UniqueName: \"kubernetes.io/projected/871db98b-a200-4e8b-898d-c95863a05c56-kube-api-access-scktm\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.887890 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-catalog-content\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.887944 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scktm\" (UniqueName: \"kubernetes.io/projected/871db98b-a200-4e8b-898d-c95863a05c56-kube-api-access-scktm\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.888029 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-utilities\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.888477 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-utilities\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.888685 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-catalog-content\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:17 crc kubenswrapper[4636]: I1002 22:44:17.921929 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-scktm\" (UniqueName: \"kubernetes.io/projected/871db98b-a200-4e8b-898d-c95863a05c56-kube-api-access-scktm\") pod \"redhat-operators-mpnqg\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:18 crc kubenswrapper[4636]: I1002 22:44:18.013991 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:18 crc kubenswrapper[4636]: I1002 22:44:18.568989 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mpnqg"] Oct 02 22:44:18 crc kubenswrapper[4636]: I1002 22:44:18.655314 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerStarted","Data":"569cbae4ffe8769b0b0e539b3a5bca1c5965fef010e8347abbbeda729370369e"} Oct 02 22:44:19 crc kubenswrapper[4636]: I1002 22:44:19.666399 4636 generic.go:334] "Generic (PLEG): container finished" podID="871db98b-a200-4e8b-898d-c95863a05c56" containerID="a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283" exitCode=0 Oct 02 22:44:19 crc kubenswrapper[4636]: I1002 22:44:19.666739 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerDied","Data":"a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283"} Oct 02 22:44:19 crc kubenswrapper[4636]: I1002 22:44:19.669249 4636 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 22:44:21 crc kubenswrapper[4636]: I1002 22:44:21.707458 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerStarted","Data":"3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6"} Oct 02 22:44:25 crc kubenswrapper[4636]: I1002 22:44:25.779803 4636 generic.go:334] "Generic (PLEG): container finished" podID="871db98b-a200-4e8b-898d-c95863a05c56" containerID="3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6" exitCode=0 Oct 02 22:44:25 crc kubenswrapper[4636]: I1002 22:44:25.779931 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerDied","Data":"3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6"} Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.790556 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerStarted","Data":"1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768"} Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.815664 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mpnqg" podStartSLOduration=3.295260125 podStartE2EDuration="9.815646282s" podCreationTimestamp="2025-10-02 22:44:17 +0000 UTC" firstStartedPulling="2025-10-02 22:44:19.669028047 +0000 UTC m=+4850.992036066" lastFinishedPulling="2025-10-02 22:44:26.189414194 +0000 UTC m=+4857.512422223" observedRunningTime="2025-10-02 22:44:26.805406398 +0000 UTC m=+4858.128414437" watchObservedRunningTime="2025-10-02 22:44:26.815646282 +0000 UTC m=+4858.138654301" Oct 02 22:44:26 crc 
kubenswrapper[4636]: I1002 22:44:26.864607 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.866049 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.868573 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-n75qw" Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.875588 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.878731 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzrmg\" (UniqueName: \"kubernetes.io/projected/a9bd5c32-a2ce-4c0c-b460-c7863fc0976e-kube-api-access-zzrmg\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.878802 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.980924 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzrmg\" (UniqueName: \"kubernetes.io/projected/a9bd5c32-a2ce-4c0c-b460-c7863fc0976e-kube-api-access-zzrmg\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.980975 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:26 crc kubenswrapper[4636]: I1002 22:44:26.981602 4636 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:27 crc kubenswrapper[4636]: I1002 22:44:27.031046 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzrmg\" (UniqueName: \"kubernetes.io/projected/a9bd5c32-a2ce-4c0c-b460-c7863fc0976e-kube-api-access-zzrmg\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:27 crc kubenswrapper[4636]: I1002 22:44:27.039537 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:27 crc kubenswrapper[4636]: I1002 22:44:27.188355 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 02 22:44:27 crc kubenswrapper[4636]: I1002 22:44:27.681321 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 02 22:44:27 crc kubenswrapper[4636]: I1002 22:44:27.802279 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e","Type":"ContainerStarted","Data":"b87bd6ef16b70666746ce6587f9b762dd81333114b95d1c14b59b7576fae0e9d"} Oct 02 22:44:28 crc kubenswrapper[4636]: I1002 22:44:28.014820 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:28 crc kubenswrapper[4636]: I1002 22:44:28.014858 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:29 crc kubenswrapper[4636]: I1002 22:44:29.064659 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mpnqg" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="registry-server" probeResult="failure" output=< Oct 02 22:44:29 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s Oct 02 22:44:29 crc kubenswrapper[4636]: > Oct 02 22:44:29 crc kubenswrapper[4636]: I1002 22:44:29.824309 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"a9bd5c32-a2ce-4c0c-b460-c7863fc0976e","Type":"ContainerStarted","Data":"b212ba520f8fde4c3cc4b1071622850a8a5b588ed7235d62f90d68b88db61d28"} Oct 02 22:44:29 crc kubenswrapper[4636]: I1002 22:44:29.847184 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.797103525 podStartE2EDuration="3.847162383s" podCreationTimestamp="2025-10-02 22:44:26 +0000 UTC" firstStartedPulling="2025-10-02 22:44:27.689422307 +0000 UTC m=+4859.012430326" lastFinishedPulling="2025-10-02 22:44:28.739481165 +0000 UTC m=+4860.062489184" observedRunningTime="2025-10-02 22:44:29.844674117 +0000 UTC m=+4861.167682156" watchObservedRunningTime="2025-10-02 22:44:29.847162383 +0000 UTC m=+4861.170170402" Oct 02 22:44:38 crc kubenswrapper[4636]: I1002 22:44:38.066898 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:38 crc kubenswrapper[4636]: I1002 22:44:38.121569 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:38 crc kubenswrapper[4636]: I1002 22:44:38.305287 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mpnqg"] Oct 02 22:44:39 crc kubenswrapper[4636]: I1002 22:44:39.914630 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mpnqg" podUID="871db98b-a200-4e8b-898d-c95863a05c56" 
containerName="registry-server" containerID="cri-o://1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768" gracePeriod=2 Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.378036 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.443554 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-catalog-content\") pod \"871db98b-a200-4e8b-898d-c95863a05c56\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.443607 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scktm\" (UniqueName: \"kubernetes.io/projected/871db98b-a200-4e8b-898d-c95863a05c56-kube-api-access-scktm\") pod \"871db98b-a200-4e8b-898d-c95863a05c56\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.443662 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-utilities\") pod \"871db98b-a200-4e8b-898d-c95863a05c56\" (UID: \"871db98b-a200-4e8b-898d-c95863a05c56\") " Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.446688 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-utilities" (OuterVolumeSpecName: "utilities") pod "871db98b-a200-4e8b-898d-c95863a05c56" (UID: "871db98b-a200-4e8b-898d-c95863a05c56"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.453007 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/871db98b-a200-4e8b-898d-c95863a05c56-kube-api-access-scktm" (OuterVolumeSpecName: "kube-api-access-scktm") pod "871db98b-a200-4e8b-898d-c95863a05c56" (UID: "871db98b-a200-4e8b-898d-c95863a05c56"). InnerVolumeSpecName "kube-api-access-scktm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.546026 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.546064 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scktm\" (UniqueName: \"kubernetes.io/projected/871db98b-a200-4e8b-898d-c95863a05c56-kube-api-access-scktm\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.569103 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "871db98b-a200-4e8b-898d-c95863a05c56" (UID: "871db98b-a200-4e8b-898d-c95863a05c56"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.650564 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/871db98b-a200-4e8b-898d-c95863a05c56-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.927088 4636 generic.go:334] "Generic (PLEG): container finished" podID="871db98b-a200-4e8b-898d-c95863a05c56" containerID="1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768" exitCode=0 Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.927137 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerDied","Data":"1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768"} Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.927197 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpnqg" event={"ID":"871db98b-a200-4e8b-898d-c95863a05c56","Type":"ContainerDied","Data":"569cbae4ffe8769b0b0e539b3a5bca1c5965fef010e8347abbbeda729370369e"} Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.927215 4636 scope.go:117] "RemoveContainer" containerID="1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.927267 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mpnqg" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.955073 4636 scope.go:117] "RemoveContainer" containerID="3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.972923 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mpnqg"] Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.982538 4636 scope.go:117] "RemoveContainer" containerID="a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283" Oct 02 22:44:40 crc kubenswrapper[4636]: I1002 22:44:40.983122 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mpnqg"] Oct 02 22:44:41 crc kubenswrapper[4636]: I1002 22:44:41.028596 4636 scope.go:117] "RemoveContainer" containerID="1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768" Oct 02 22:44:41 crc kubenswrapper[4636]: E1002 22:44:41.029841 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768\": container with ID starting with 1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768 not found: ID does not exist" containerID="1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768" Oct 02 22:44:41 crc kubenswrapper[4636]: I1002 22:44:41.029879 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768"} err="failed to get container status \"1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768\": rpc error: code = NotFound desc = could not find container \"1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768\": container with ID starting with 1a92d094a970137770e65ced4c21c02c57e0f3564c9485222932c58e38bb4768 not found: ID does not exist" Oct 02 22:44:41 crc 
kubenswrapper[4636]: I1002 22:44:41.029905 4636 scope.go:117] "RemoveContainer" containerID="3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6" Oct 02 22:44:41 crc kubenswrapper[4636]: E1002 22:44:41.031269 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6\": container with ID starting with 3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6 not found: ID does not exist" containerID="3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6" Oct 02 22:44:41 crc kubenswrapper[4636]: I1002 22:44:41.031305 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6"} err="failed to get container status \"3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6\": rpc error: code = NotFound desc = could not find container \"3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6\": container with ID starting with 3039f0e889eade9612d556b14b21b3da73dfd0626dbeb24e4f8ef6f787b2a9c6 not found: ID does not exist" Oct 02 22:44:41 crc kubenswrapper[4636]: I1002 22:44:41.031326 4636 scope.go:117] "RemoveContainer" containerID="a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283" Oct 02 22:44:41 crc kubenswrapper[4636]: E1002 22:44:41.031940 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283\": container with ID starting with a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283 not found: ID does not exist" containerID="a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283" Oct 02 22:44:41 crc kubenswrapper[4636]: I1002 22:44:41.031972 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283"} err="failed to get container status \"a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283\": rpc error: code = NotFound desc = could not find container \"a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283\": container with ID starting with a9c4a0014cf68e4610f0a5f86bc5696b8197adbdc3ef76c686749103c1997283 not found: ID does not exist" Oct 02 22:44:41 crc kubenswrapper[4636]: I1002 22:44:41.613177 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="871db98b-a200-4e8b-898d-c95863a05c56" path="/var/lib/kubelet/pods/871db98b-a200-4e8b-898d-c95863a05c56/volumes" Oct 02 22:44:53 crc kubenswrapper[4636]: I1002 22:44:53.117269 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:44:53 crc kubenswrapper[4636]: I1002 22:44:53.117827 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.155354 4636 kubelet.go:2421] "SyncLoop ADD" 
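
The RemoveContainer sequence above shows the kubelet asking the runtime for the status of containers it has just deleted, getting rpc NotFound back, and logging rather than failing: for cleanup purposes, "already gone" counts as success. A sketch of that tolerant-delete pattern using grpc status codes; removeContainer and the stub are illustrative names, not kubelet internals:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats NotFound from the runtime as "already removed".
func removeContainer(remove func(id string) error, id string) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return err // a real failure worth surfacing
	}
	return nil // removed now, or was already gone
}

func main() {
	gone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	fmt.Println(removeContainer(gone, "1a92d094a970")) // <nil>: NotFound is tolerated
}
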
source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn"] Oct 02 22:45:00 crc kubenswrapper[4636]: E1002 22:45:00.156670 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="extract-utilities" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.156692 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="extract-utilities" Oct 02 22:45:00 crc kubenswrapper[4636]: E1002 22:45:00.156735 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="extract-content" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.156767 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="extract-content" Oct 02 22:45:00 crc kubenswrapper[4636]: E1002 22:45:00.156795 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="registry-server" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.156807 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="registry-server" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.157391 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="871db98b-a200-4e8b-898d-c95863a05c56" containerName="registry-server" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.158300 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.160373 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.160383 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.163172 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn"] Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.347942 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7953a6c-a71c-416f-8603-2fa76f81d825-secret-volume\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.348067 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7953a6c-a71c-416f-8603-2fa76f81d825-config-volume\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.348109 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsvv8\" (UniqueName: \"kubernetes.io/projected/b7953a6c-a71c-416f-8603-2fa76f81d825-kube-api-access-nsvv8\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.450611 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7953a6c-a71c-416f-8603-2fa76f81d825-config-volume\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.450719 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsvv8\" (UniqueName: \"kubernetes.io/projected/b7953a6c-a71c-416f-8603-2fa76f81d825-kube-api-access-nsvv8\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.450967 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7953a6c-a71c-416f-8603-2fa76f81d825-secret-volume\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.451525 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7953a6c-a71c-416f-8603-2fa76f81d825-config-volume\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.460667 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7953a6c-a71c-416f-8603-2fa76f81d825-secret-volume\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.471581 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsvv8\" (UniqueName: \"kubernetes.io/projected/b7953a6c-a71c-416f-8603-2fa76f81d825-kube-api-access-nsvv8\") pod \"collect-profiles-29324085-phbbn\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.521248 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:00 crc kubenswrapper[4636]: I1002 22:45:00.958885 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn"] Oct 02 22:45:00 crc kubenswrapper[4636]: W1002 22:45:00.980489 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7953a6c_a71c_416f_8603_2fa76f81d825.slice/crio-021d025833f8f0e6e319a3cc0c81527b178f60625be9d5592ababbfb119bba43 WatchSource:0}: Error finding container 021d025833f8f0e6e319a3cc0c81527b178f60625be9d5592ababbfb119bba43: Status 404 returned error can't find the container with id 021d025833f8f0e6e319a3cc0c81527b178f60625be9d5592ababbfb119bba43 Oct 02 22:45:01 crc kubenswrapper[4636]: I1002 22:45:01.111085 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" event={"ID":"b7953a6c-a71c-416f-8603-2fa76f81d825","Type":"ContainerStarted","Data":"021d025833f8f0e6e319a3cc0c81527b178f60625be9d5592ababbfb119bba43"} Oct 02 22:45:02 crc kubenswrapper[4636]: I1002 22:45:02.121674 4636 generic.go:334] "Generic (PLEG): container finished" podID="b7953a6c-a71c-416f-8603-2fa76f81d825" containerID="ef3f89de6e8ef651df991700ca6e3d44457510477672d27e913b6c00b809ea8c" exitCode=0 Oct 02 22:45:02 crc kubenswrapper[4636]: I1002 22:45:02.121973 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" event={"ID":"b7953a6c-a71c-416f-8603-2fa76f81d825","Type":"ContainerDied","Data":"ef3f89de6e8ef651df991700ca6e3d44457510477672d27e913b6c00b809ea8c"} Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.152436 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" event={"ID":"b7953a6c-a71c-416f-8603-2fa76f81d825","Type":"ContainerDied","Data":"021d025833f8f0e6e319a3cc0c81527b178f60625be9d5592ababbfb119bba43"} Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.153094 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="021d025833f8f0e6e319a3cc0c81527b178f60625be9d5592ababbfb119bba43" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.183899 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.343805 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsvv8\" (UniqueName: \"kubernetes.io/projected/b7953a6c-a71c-416f-8603-2fa76f81d825-kube-api-access-nsvv8\") pod \"b7953a6c-a71c-416f-8603-2fa76f81d825\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.343933 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7953a6c-a71c-416f-8603-2fa76f81d825-secret-volume\") pod \"b7953a6c-a71c-416f-8603-2fa76f81d825\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.343972 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7953a6c-a71c-416f-8603-2fa76f81d825-config-volume\") pod \"b7953a6c-a71c-416f-8603-2fa76f81d825\" (UID: \"b7953a6c-a71c-416f-8603-2fa76f81d825\") " Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.344704 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7953a6c-a71c-416f-8603-2fa76f81d825-config-volume" (OuterVolumeSpecName: "config-volume") pod "b7953a6c-a71c-416f-8603-2fa76f81d825" (UID: "b7953a6c-a71c-416f-8603-2fa76f81d825"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.349423 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7953a6c-a71c-416f-8603-2fa76f81d825-kube-api-access-nsvv8" (OuterVolumeSpecName: "kube-api-access-nsvv8") pod "b7953a6c-a71c-416f-8603-2fa76f81d825" (UID: "b7953a6c-a71c-416f-8603-2fa76f81d825"). InnerVolumeSpecName "kube-api-access-nsvv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.350384 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7953a6c-a71c-416f-8603-2fa76f81d825-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b7953a6c-a71c-416f-8603-2fa76f81d825" (UID: "b7953a6c-a71c-416f-8603-2fa76f81d825"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.445837 4636 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7953a6c-a71c-416f-8603-2fa76f81d825-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.445867 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsvv8\" (UniqueName: \"kubernetes.io/projected/b7953a6c-a71c-416f-8603-2fa76f81d825-kube-api-access-nsvv8\") on node \"crc\" DevicePath \"\"" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.445877 4636 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7953a6c-a71c-416f-8603-2fa76f81d825-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.766860 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zc6x/must-gather-6ms5s"] Oct 02 22:45:04 crc kubenswrapper[4636]: E1002 22:45:04.767244 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7953a6c-a71c-416f-8603-2fa76f81d825" containerName="collect-profiles" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.767262 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7953a6c-a71c-416f-8603-2fa76f81d825" containerName="collect-profiles" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.767467 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7953a6c-a71c-416f-8603-2fa76f81d825" containerName="collect-profiles" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.768378 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.770381 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9zc6x"/"openshift-service-ca.crt" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.771258 4636 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9zc6x"/"kube-root-ca.crt" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.772284 4636 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9zc6x"/"default-dockercfg-khscp" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.785619 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9zc6x/must-gather-6ms5s"] Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.853263 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b6333460-ee9d-4e83-b3b3-4ec81848af13-must-gather-output\") pod \"must-gather-6ms5s\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.853647 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n9dv\" (UniqueName: \"kubernetes.io/projected/b6333460-ee9d-4e83-b3b3-4ec81848af13-kube-api-access-4n9dv\") pod \"must-gather-6ms5s\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.955230 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" 
(UniqueName: \"kubernetes.io/empty-dir/b6333460-ee9d-4e83-b3b3-4ec81848af13-must-gather-output\") pod \"must-gather-6ms5s\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.955537 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n9dv\" (UniqueName: \"kubernetes.io/projected/b6333460-ee9d-4e83-b3b3-4ec81848af13-kube-api-access-4n9dv\") pod \"must-gather-6ms5s\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.955642 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b6333460-ee9d-4e83-b3b3-4ec81848af13-must-gather-output\") pod \"must-gather-6ms5s\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:04 crc kubenswrapper[4636]: I1002 22:45:04.973048 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n9dv\" (UniqueName: \"kubernetes.io/projected/b6333460-ee9d-4e83-b3b3-4ec81848af13-kube-api-access-4n9dv\") pod \"must-gather-6ms5s\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:05 crc kubenswrapper[4636]: I1002 22:45:05.094936 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:45:05 crc kubenswrapper[4636]: I1002 22:45:05.191555 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:05 crc kubenswrapper[4636]: I1002 22:45:05.274505 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb"] Oct 02 22:45:05 crc kubenswrapper[4636]: I1002 22:45:05.295221 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324040-x8sbb"] Oct 02 22:45:05 crc kubenswrapper[4636]: I1002 22:45:05.565428 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9zc6x/must-gather-6ms5s"] Oct 02 22:45:05 crc kubenswrapper[4636]: I1002 22:45:05.616446 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9" path="/var/lib/kubelet/pods/4bccf6c7-a7aa-4565-a8b1-7d9acb44e1e9/volumes" Oct 02 22:45:05 crc kubenswrapper[4636]: W1002 22:45:05.654355 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6333460_ee9d_4e83_b3b3_4ec81848af13.slice/crio-6db8cdf28452b9e5afea0fb31fa2126105c63b12cf729e009fcc92649595ee85 WatchSource:0}: Error finding container 6db8cdf28452b9e5afea0fb31fa2126105c63b12cf729e009fcc92649595ee85: Status 404 returned error can't find the container with id 6db8cdf28452b9e5afea0fb31fa2126105c63b12cf729e009fcc92649595ee85 Oct 02 22:45:06 crc kubenswrapper[4636]: I1002 22:45:06.201553 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" event={"ID":"b6333460-ee9d-4e83-b3b3-4ec81848af13","Type":"ContainerStarted","Data":"6db8cdf28452b9e5afea0fb31fa2126105c63b12cf729e009fcc92649595ee85"} Oct 02 22:45:07 crc kubenswrapper[4636]: I1002 22:45:07.113588 4636 
scope.go:117] "RemoveContainer" containerID="fc08f0071358ca188acc2a17eb6c0ea67aaca171c2f7a5e53545687bb98fc830" Oct 02 22:45:11 crc kubenswrapper[4636]: I1002 22:45:11.261031 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" event={"ID":"b6333460-ee9d-4e83-b3b3-4ec81848af13","Type":"ContainerStarted","Data":"99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479"} Oct 02 22:45:11 crc kubenswrapper[4636]: I1002 22:45:11.261605 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" event={"ID":"b6333460-ee9d-4e83-b3b3-4ec81848af13","Type":"ContainerStarted","Data":"07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759"} Oct 02 22:45:11 crc kubenswrapper[4636]: I1002 22:45:11.282664 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" podStartSLOduration=2.833165421 podStartE2EDuration="7.282639502s" podCreationTimestamp="2025-10-02 22:45:04 +0000 UTC" firstStartedPulling="2025-10-02 22:45:05.656460118 +0000 UTC m=+4896.979468137" lastFinishedPulling="2025-10-02 22:45:10.105934199 +0000 UTC m=+4901.428942218" observedRunningTime="2025-10-02 22:45:11.276439706 +0000 UTC m=+4902.599447735" watchObservedRunningTime="2025-10-02 22:45:11.282639502 +0000 UTC m=+4902.605647521" Oct 02 22:45:15 crc kubenswrapper[4636]: I1002 22:45:15.824088 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-z4jqb"] Oct 02 22:45:15 crc kubenswrapper[4636]: I1002 22:45:15.827243 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:15 crc kubenswrapper[4636]: I1002 22:45:15.897367 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/731b6858-850b-434d-b249-6ca0836def25-host\") pod \"crc-debug-z4jqb\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") " pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:15 crc kubenswrapper[4636]: I1002 22:45:15.897901 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpm5q\" (UniqueName: \"kubernetes.io/projected/731b6858-850b-434d-b249-6ca0836def25-kube-api-access-zpm5q\") pod \"crc-debug-z4jqb\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") " pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:15 crc kubenswrapper[4636]: I1002 22:45:15.999713 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpm5q\" (UniqueName: \"kubernetes.io/projected/731b6858-850b-434d-b249-6ca0836def25-kube-api-access-zpm5q\") pod \"crc-debug-z4jqb\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") " pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:16 crc kubenswrapper[4636]: I1002 22:45:16.000152 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/731b6858-850b-434d-b249-6ca0836def25-host\") pod \"crc-debug-z4jqb\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") " pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:16 crc kubenswrapper[4636]: I1002 22:45:16.000313 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/731b6858-850b-434d-b249-6ca0836def25-host\") pod \"crc-debug-z4jqb\" (UID: 
\"731b6858-850b-434d-b249-6ca0836def25\") " pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:16 crc kubenswrapper[4636]: I1002 22:45:16.025851 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpm5q\" (UniqueName: \"kubernetes.io/projected/731b6858-850b-434d-b249-6ca0836def25-kube-api-access-zpm5q\") pod \"crc-debug-z4jqb\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") " pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:16 crc kubenswrapper[4636]: I1002 22:45:16.146028 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" Oct 02 22:45:16 crc kubenswrapper[4636]: W1002 22:45:16.196286 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod731b6858_850b_434d_b249_6ca0836def25.slice/crio-2d9cadef7cff2e2460df445aecad067ec0a952f7ed9f2b1fed0d1bde1ebddae1 WatchSource:0}: Error finding container 2d9cadef7cff2e2460df445aecad067ec0a952f7ed9f2b1fed0d1bde1ebddae1: Status 404 returned error can't find the container with id 2d9cadef7cff2e2460df445aecad067ec0a952f7ed9f2b1fed0d1bde1ebddae1 Oct 02 22:45:16 crc kubenswrapper[4636]: I1002 22:45:16.306415 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" event={"ID":"731b6858-850b-434d-b249-6ca0836def25","Type":"ContainerStarted","Data":"2d9cadef7cff2e2460df445aecad067ec0a952f7ed9f2b1fed0d1bde1ebddae1"} Oct 02 22:45:23 crc kubenswrapper[4636]: I1002 22:45:23.117513 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:45:23 crc kubenswrapper[4636]: I1002 22:45:23.118088 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:45:29 crc kubenswrapper[4636]: I1002 22:45:29.465244 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" event={"ID":"731b6858-850b-434d-b249-6ca0836def25","Type":"ContainerStarted","Data":"be9fc0b6de7cfd4084897d526469bf5ad509c5f01f68361ef3d844da90f20e91"} Oct 02 22:45:29 crc kubenswrapper[4636]: I1002 22:45:29.489981 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" podStartSLOduration=1.917229721 podStartE2EDuration="14.489960552s" podCreationTimestamp="2025-10-02 22:45:15 +0000 UTC" firstStartedPulling="2025-10-02 22:45:16.198282074 +0000 UTC m=+4907.521290083" lastFinishedPulling="2025-10-02 22:45:28.771012895 +0000 UTC m=+4920.094020914" observedRunningTime="2025-10-02 22:45:29.476977415 +0000 UTC m=+4920.799985444" watchObservedRunningTime="2025-10-02 22:45:29.489960552 +0000 UTC m=+4920.812968591" Oct 02 22:45:35 crc kubenswrapper[4636]: I1002 22:45:35.831455 4636 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","podb7953a6c-a71c-416f-8603-2fa76f81d825"] err="unable to destroy cgroup paths for cgroup [kubepods burstable podb7953a6c-a71c-416f-8603-2fa76f81d825] : 
Timed out while waiting for systemd to remove kubepods-burstable-podb7953a6c_a71c_416f_8603_2fa76f81d825.slice" Oct 02 22:45:35 crc kubenswrapper[4636]: E1002 22:45:35.832797 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable podb7953a6c-a71c-416f-8603-2fa76f81d825] : unable to destroy cgroup paths for cgroup [kubepods burstable podb7953a6c-a71c-416f-8603-2fa76f81d825] : Timed out while waiting for systemd to remove kubepods-burstable-podb7953a6c_a71c_416f_8603_2fa76f81d825.slice" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" podUID="b7953a6c-a71c-416f-8603-2fa76f81d825" Oct 02 22:45:36 crc kubenswrapper[4636]: I1002 22:45:36.518913 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324085-phbbn" Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.117662 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.119095 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.119308 4636 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.121045 4636 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"} pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.121178 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" containerID="cri-o://a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" gracePeriod=600 Oct 02 22:45:53 crc kubenswrapper[4636]: E1002 22:45:53.259095 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.703564 4636 generic.go:334] "Generic (PLEG): container finished" podID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" exitCode=0 Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.703656 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
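
The cgroup error opening this span, "Timed out while waiting for systemd to remove kubepods-burstable-podb7953a6c_....slice", comes from the kubelet delegating slice removal to systemd and not seeing the transient unit disappear in time. A sketch of the same stop-and-wait over D-Bus with github.com/coreos/go-systemd; the 30s deadline is illustrative, the kubelet's is internal:

package main

import (
	"context"
	"fmt"
	"time"

	sd "github.com/coreos/go-systemd/v22/dbus"
)

func main() {
	ctx := context.Background()
	conn, err := sd.NewWithContext(ctx)
	if err != nil {
		fmt.Println("dbus:", err)
		return
	}
	defer conn.Close()

	// Ask systemd to stop (and so remove) the pod's transient slice, then wait
	// for the job result with a deadline; the timeout branch corresponds to the
	// "Timed out while waiting for systemd to remove ..." error above.
	result := make(chan string, 1)
	slice := "kubepods-burstable-podb7953a6c_a71c_416f_8603_2fa76f81d825.slice"
	if _, err := conn.StopUnitContext(ctx, slice, "replace", result); err != nil {
		fmt.Println("stop:", err)
		return
	}
	select {
	case r := <-result:
		fmt.Println("job result:", r) // "done" on success
	case <-time.After(30 * time.Second):
		fmt.Println("timed out waiting for systemd to remove the slice")
	}
}
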
pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerDied","Data":"a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"} Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.703945 4636 scope.go:117] "RemoveContainer" containerID="d07658d14c348f924c225743c75fcd5b94b9d94c43641ed3fd4c0cbf7a1d8bef" Oct 02 22:45:53 crc kubenswrapper[4636]: I1002 22:45:53.704308 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:45:53 crc kubenswrapper[4636]: E1002 22:45:53.704565 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:46:04 crc kubenswrapper[4636]: I1002 22:46:04.604162 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:46:04 crc kubenswrapper[4636]: E1002 22:46:04.604921 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:46:19 crc kubenswrapper[4636]: I1002 22:46:19.613561 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:46:19 crc kubenswrapper[4636]: E1002 22:46:19.614438 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:46:30 crc kubenswrapper[4636]: I1002 22:46:30.603416 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:46:30 crc kubenswrapper[4636]: E1002 22:46:30.604168 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:46:41 crc kubenswrapper[4636]: I1002 22:46:41.611138 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:46:41 crc kubenswrapper[4636]: E1002 22:46:41.612063 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:46:50 crc kubenswrapper[4636]: I1002 22:46:50.751626 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54b7f7458b-qqbgx_59bde702-8c1a-491a-a1f8-0c0862f05d77/barbican-api/0.log" Oct 02 22:46:50 crc kubenswrapper[4636]: I1002 22:46:50.764831 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54b7f7458b-qqbgx_59bde702-8c1a-491a-a1f8-0c0862f05d77/barbican-api-log/0.log" Oct 02 22:46:50 crc kubenswrapper[4636]: I1002 22:46:50.985394 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c5fc5f7c4-cvl68_9140b0c4-6589-4b25-b300-0d4421daca16/barbican-keystone-listener/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.178989 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c5fc5f7c4-cvl68_9140b0c4-6589-4b25-b300-0d4421daca16/barbican-keystone-listener-log/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.247388 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6f9d95566c-lfqlc_b8eb7caa-f57a-474e-b86a-f85079b23081/barbican-worker/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.371644 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6f9d95566c-lfqlc_b8eb7caa-f57a-474e-b86a-f85079b23081/barbican-worker-log/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.507363 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-fzwjn_66588ffc-e8a9-4ced-a324-c9d436880e52/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.795856 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3d58ed15-a4f4-4cb0-8d57-95f5a863bca3/ceilometer-central-agent/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.834580 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3d58ed15-a4f4-4cb0-8d57-95f5a863bca3/ceilometer-notification-agent/0.log" Oct 02 22:46:51 crc kubenswrapper[4636]: I1002 22:46:51.930475 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3d58ed15-a4f4-4cb0-8d57-95f5a863bca3/proxy-httpd/0.log" Oct 02 22:46:52 crc kubenswrapper[4636]: I1002 22:46:52.047890 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3d58ed15-a4f4-4cb0-8d57-95f5a863bca3/sg-core/0.log" Oct 02 22:46:52 crc kubenswrapper[4636]: I1002 22:46:52.284389 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_987f22a6-6842-4d5c-ac32-ef7698f66ed1/cinder-api/0.log" Oct 02 22:46:52 crc kubenswrapper[4636]: I1002 22:46:52.361966 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_987f22a6-6842-4d5c-ac32-ef7698f66ed1/cinder-api-log/0.log" Oct 02 22:46:52 crc kubenswrapper[4636]: I1002 22:46:52.791998 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_78574741-b4d5-4f6c-a92e-37cf2aeeeaca/cinder-scheduler/0.log" Oct 02 22:46:52 crc kubenswrapper[4636]: I1002 22:46:52.862131 4636 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-scheduler-0_78574741-b4d5-4f6c-a92e-37cf2aeeeaca/probe/0.log" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.033370 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-z7cxz_7345fa48-35b0-41e9-ae50-3920e8a1a6f5/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.311039 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-27qbg_3306d272-f7a6-40b6-87c5-13ea12f59e31/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.438502 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-d7466_593f05c7-4207-4893-9fe1-7487a5a7718c/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.603818 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:46:53 crc kubenswrapper[4636]: E1002 22:46:53.604165 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.631136 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-4sx8m_b21803db-480b-4261-a6d3-62042843c92f/init/0.log" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.790850 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-4sx8m_b21803db-480b-4261-a6d3-62042843c92f/init/0.log" Oct 02 22:46:53 crc kubenswrapper[4636]: I1002 22:46:53.935392 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-c7jlb_49804afe-9b10-4aef-bd85-414372732d36/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.121674 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79dc84bdb7-4sx8m_b21803db-480b-4261-a6d3-62042843c92f/dnsmasq-dns/0.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.284146 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_1f3dba52-50a6-4789-a799-4e24bbb6e5ab/glance-httpd/0.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.347696 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_1f3dba52-50a6-4789-a799-4e24bbb6e5ab/glance-log/0.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.522346 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f2d7a9fb-d15d-4365-97bd-5355c4f6969f/glance-httpd/0.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.548566 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f2d7a9fb-d15d-4365-97bd-5355c4f6969f/glance-log/0.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.746567 4636 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_horizon-7646d88f4d-85mgl_65063729-cda3-488f-8e94-364db15e2d2d/horizon/2.log" Oct 02 22:46:54 crc kubenswrapper[4636]: I1002 22:46:54.923329 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7646d88f4d-85mgl_65063729-cda3-488f-8e94-364db15e2d2d/horizon/1.log" Oct 02 22:46:55 crc kubenswrapper[4636]: I1002 22:46:55.066302 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-v9sxf_c9420cf3-5ced-4fd8-a57e-c61c9ca5c5b0/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:55 crc kubenswrapper[4636]: I1002 22:46:55.206628 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7646d88f4d-85mgl_65063729-cda3-488f-8e94-364db15e2d2d/horizon-log/0.log" Oct 02 22:46:55 crc kubenswrapper[4636]: I1002 22:46:55.291870 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-mcd5b_0db4951d-65e4-4a9e-9761-433d6cfb17c9/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:55 crc kubenswrapper[4636]: I1002 22:46:55.625250 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29324041-ffnwj_831000fc-4ab3-4609-b42f-f45920de8917/keystone-cron/0.log" Oct 02 22:46:55 crc kubenswrapper[4636]: I1002 22:46:55.807988 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-767589cc85-t7ltn_a93dc4a5-ad77-418a-9cd1-23501d201acd/keystone-api/0.log" Oct 02 22:46:55 crc kubenswrapper[4636]: I1002 22:46:55.836357 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_cb2c4720-c639-416e-9217-f53ae67509e9/kube-state-metrics/0.log" Oct 02 22:46:56 crc kubenswrapper[4636]: I1002 22:46:56.159873 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-dfvkt_3e7736a9-7b5a-4d65-ad6e-8814a0a23506/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:57 crc kubenswrapper[4636]: I1002 22:46:57.124571 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-86d8p_c30b6801-d279-40ae-8edf-a01189809528/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:57 crc kubenswrapper[4636]: I1002 22:46:57.164587 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-655dcc86f7-zxnj7_181f63c0-d749-44d0-8bc8-588a8d1fc12f/neutron-httpd/0.log" Oct 02 22:46:57 crc kubenswrapper[4636]: I1002 22:46:57.351705 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-655dcc86f7-zxnj7_181f63c0-d749-44d0-8bc8-588a8d1fc12f/neutron-api/0.log" Oct 02 22:46:57 crc kubenswrapper[4636]: I1002 22:46:57.563330 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_d85e3d9d-106a-4b20-8496-7394884ca255/memcached/0.log" Oct 02 22:46:58 crc kubenswrapper[4636]: I1002 22:46:58.352771 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_6d5c1a37-2e01-45da-8dc5-06d98b5b07ee/nova-cell0-conductor-conductor/0.log" Oct 02 22:46:58 crc kubenswrapper[4636]: I1002 22:46:58.567154 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_6fd4eafc-387c-4297-9ab1-9f9c0666bf00/nova-cell1-conductor-conductor/0.log" Oct 02 22:46:58 crc kubenswrapper[4636]: I1002 22:46:58.721257 4636 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_44babad0-aad8-4dcf-870d-798d6444957f/nova-api-log/0.log" Oct 02 22:46:58 crc kubenswrapper[4636]: I1002 22:46:58.884390 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_912c7cd9-fffe-40eb-a35c-4a05d26a3374/nova-cell1-novncproxy-novncproxy/0.log" Oct 02 22:46:59 crc kubenswrapper[4636]: I1002 22:46:59.062570 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_44babad0-aad8-4dcf-870d-798d6444957f/nova-api-api/0.log" Oct 02 22:46:59 crc kubenswrapper[4636]: I1002 22:46:59.070433 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-mwm67_d5f9ecc8-8087-42ef-97bb-b083a26f8272/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:46:59 crc kubenswrapper[4636]: I1002 22:46:59.608952 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d97e51be-549b-475b-a257-71df6c99e2ef/nova-metadata-log/0.log" Oct 02 22:46:59 crc kubenswrapper[4636]: I1002 22:46:59.920994 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_80a6d533-3442-4d4f-be04-1e95eefb5598/mysql-bootstrap/0.log" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.020806 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_19136e75-a199-48d4-8b98-40771a6b84fc/nova-scheduler-scheduler/0.log" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.218162 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wvz6n"] Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.220113 4636 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.232440 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvztw\" (UniqueName: \"kubernetes.io/projected/ff822565-4d8f-4b72-86dd-6b204f083b58-kube-api-access-kvztw\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.232514 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-catalog-content\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.232604 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-utilities\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.244652 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wvz6n"] Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.247255 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_80a6d533-3442-4d4f-be04-1e95eefb5598/mysql-bootstrap/0.log" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.295139 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_80a6d533-3442-4d4f-be04-1e95eefb5598/galera/0.log" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.334141 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-catalog-content\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.334201 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-utilities\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.334296 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvztw\" (UniqueName: \"kubernetes.io/projected/ff822565-4d8f-4b72-86dd-6b204f083b58-kube-api-access-kvztw\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.335001 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-catalog-content\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc 
kubenswrapper[4636]: I1002 22:47:00.335125 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-utilities\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.391318 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvztw\" (UniqueName: \"kubernetes.io/projected/ff822565-4d8f-4b72-86dd-6b204f083b58-kube-api-access-kvztw\") pod \"certified-operators-wvz6n\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") " pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.539004 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f2b95105-58f6-4984-92c7-d3dbc7dfa131/mysql-bootstrap/0.log" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.546034 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wvz6n" Oct 02 22:47:00 crc kubenswrapper[4636]: I1002 22:47:00.846206 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d97e51be-549b-475b-a257-71df6c99e2ef/nova-metadata-metadata/0.log" Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.248068 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f2b95105-58f6-4984-92c7-d3dbc7dfa131/galera/0.log" Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.333981 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f2b95105-58f6-4984-92c7-d3dbc7dfa131/mysql-bootstrap/0.log" Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.362218 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_f415d8af-0c69-42dc-b3d5-bb5cfa456768/openstackclient/0.log" Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.520165 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wvz6n"] Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.622303 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-b5dv2_189fcf5f-fafd-4af5-9b02-d8d33b6bfe65/ovn-controller/0.log" Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.715055 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-n42rv_5c99580d-a783-4d8f-9bf7-b8fd883e595e/openstack-network-exporter/0.log" Oct 02 22:47:01 crc kubenswrapper[4636]: I1002 22:47:01.902816 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ffvpv_1ba9bb1f-b0f2-4379-8ac3-7862638e6661/ovsdb-server-init/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.077847 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ffvpv_1ba9bb1f-b0f2-4379-8ac3-7862638e6661/ovsdb-server/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.092927 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ffvpv_1ba9bb1f-b0f2-4379-8ac3-7862638e6661/ovsdb-server-init/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.116344 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ffvpv_1ba9bb1f-b0f2-4379-8ac3-7862638e6661/ovs-vswitchd/0.log" Oct 02 22:47:02 
crc kubenswrapper[4636]: I1002 22:47:02.365483 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-jg4dj_681a69d8-8e31-468a-be24-c8ad9db1b0f6/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.389975 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_00dad3a1-1f5c-4a90-a5b2-5437dc73c234/ovn-northd/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.418940 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_00dad3a1-1f5c-4a90-a5b2-5437dc73c234/openstack-network-exporter/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.446217 4636 generic.go:334] "Generic (PLEG): container finished" podID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerID="3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede" exitCode=0 Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.446334 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerDied","Data":"3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede"} Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.446537 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerStarted","Data":"c8cbf06868a840d21c7972e253b1ef0948891bd4caadd666a18f4dec746a98c5"} Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.645701 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bce7eeaa-df59-4316-835f-9f0c2f233a53/openstack-network-exporter/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.692552 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bce7eeaa-df59-4316-835f-9f0c2f233a53/ovsdbserver-nb/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.866233 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aea58193-4a02-48f4-b4c6-b0938df463ff/ovsdbserver-sb/0.log" Oct 02 22:47:02 crc kubenswrapper[4636]: I1002 22:47:02.884127 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aea58193-4a02-48f4-b4c6-b0938df463ff/openstack-network-exporter/0.log" Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.066249 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-574b88487b-hjf97_52887062-8197-405e-a4ee-6387b60fbf61/placement-api/0.log" Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.269049 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-574b88487b-hjf97_52887062-8197-405e-a4ee-6387b60fbf61/placement-log/0.log" Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.286982 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73/setup-container/0.log" Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.467109 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73/setup-container/0.log" Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.570366 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_379f5124-2304-4bd2-9765-757caebdd35a/setup-container/0.log" 
Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.595807 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_8ee11ae4-6fa5-47a9-ace2-0a1a6ad36b73/rabbitmq/0.log"
Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.857942 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_379f5124-2304-4bd2-9765-757caebdd35a/rabbitmq/0.log"
Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.914029 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_379f5124-2304-4bd2-9765-757caebdd35a/setup-container/0.log"
Oct 02 22:47:03 crc kubenswrapper[4636]: I1002 22:47:03.949127 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-plk95_fa386dd8-b89e-416f-a564-c4b3396fde09/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 02 22:47:04 crc kubenswrapper[4636]: I1002 22:47:04.133616 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-tnvgh_4dd25cb2-7e73-449a-aa3b-2ff97702dad2/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 02 22:47:04 crc kubenswrapper[4636]: I1002 22:47:04.486849 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerStarted","Data":"de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87"}
Oct 02 22:47:04 crc kubenswrapper[4636]: I1002 22:47:04.608126 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-82rtw_58767458-ad30-4ec0-aa42-0a6d9634d72d/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 02 22:47:04 crc kubenswrapper[4636]: I1002 22:47:04.826600 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4hs6m_9484ec45-abcc-487a-9790-92f39c74c829/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 02 22:47:04 crc kubenswrapper[4636]: I1002 22:47:04.928195 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-27dxg_5a7476b3-58fe-4cf8-9d90-605f10c40e05/ssh-known-hosts-edpm-deployment/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.056984 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6cfb778d8c-pmqb5_4bc228bc-6368-438e-a574-aa4c80d81dc6/proxy-server/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.194396 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6cfb778d8c-pmqb5_4bc228bc-6368-438e-a574-aa4c80d81dc6/proxy-httpd/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.299877 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hnbwh_07626226-e803-4c29-a34d-9acea829a26b/swift-ring-rebalance/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.468691 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/account-auditor/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.470711 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/account-reaper/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.533542 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/account-replicator/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.656496 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/account-server/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.688205 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/container-replicator/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.692781 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/container-auditor/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.805578 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/container-server/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.859974 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/container-updater/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.914138 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/object-expirer/0.log"
Oct 02 22:47:05 crc kubenswrapper[4636]: I1002 22:47:05.937464 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/object-auditor/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.040634 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/object-replicator/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.095643 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/object-server/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.159904 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/object-updater/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.164028 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/rsync/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.245032 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e9e8a5c2-1a89-4fb3-93d5-877930afc11d/swift-recon-cron/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.506690 4636 generic.go:334] "Generic (PLEG): container finished" podID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerID="de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87" exitCode=0
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.506783 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerDied","Data":"de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87"}
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.604364 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:47:06 crc kubenswrapper[4636]: E1002 22:47:06.605039 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.852636 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-l2xnb_cf879426-52a4-4ab6-9271-6b9e0c74ebfb/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 02 22:47:06 crc kubenswrapper[4636]: I1002 22:47:06.990151 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_388bd5ff-d88f-4ea6-83e0-0ae99fc188ab/tempest-tests-tempest-tests-runner/0.log"
Oct 02 22:47:07 crc kubenswrapper[4636]: I1002 22:47:07.126013 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_a9bd5c32-a2ce-4c0c-b460-c7863fc0976e/test-operator-logs-container/0.log"
Oct 02 22:47:07 crc kubenswrapper[4636]: I1002 22:47:07.247882 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-2lgxq_581c99cd-eb0c-4117-8475-44bc85027a9a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 02 22:47:07 crc kubenswrapper[4636]: I1002 22:47:07.516514 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerStarted","Data":"e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b"}
Oct 02 22:47:10 crc kubenswrapper[4636]: I1002 22:47:10.546796 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wvz6n"
Oct 02 22:47:10 crc kubenswrapper[4636]: I1002 22:47:10.547101 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wvz6n"
Oct 02 22:47:11 crc kubenswrapper[4636]: I1002 22:47:11.595824 4636 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-wvz6n" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="registry-server" probeResult="failure" output=<
Oct 02 22:47:11 crc kubenswrapper[4636]: timeout: failed to connect service ":50051" within 1s
Oct 02 22:47:11 crc kubenswrapper[4636]: >
Oct 02 22:47:19 crc kubenswrapper[4636]: I1002 22:47:19.610311 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:47:19 crc kubenswrapper[4636]: E1002 22:47:19.611050 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:47:20 crc kubenswrapper[4636]: I1002 22:47:20.595788 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wvz6n"
Oct 02 22:47:20 crc kubenswrapper[4636]: I1002 22:47:20.615586 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wvz6n" podStartSLOduration=16.031768822 podStartE2EDuration="20.615569992s" podCreationTimestamp="2025-10-02 22:47:00 +0000 UTC" firstStartedPulling="2025-10-02 22:47:02.447699472 +0000 UTC m=+5013.770707481" lastFinishedPulling="2025-10-02 22:47:07.031500632 +0000 UTC m=+5018.354508651" observedRunningTime="2025-10-02 22:47:07.540381703 +0000 UTC m=+5018.863389722" watchObservedRunningTime="2025-10-02 22:47:20.615569992 +0000 UTC m=+5031.938578011"
Oct 02 22:47:20 crc kubenswrapper[4636]: I1002 22:47:20.653231 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wvz6n"
Oct 02 22:47:20 crc kubenswrapper[4636]: I1002 22:47:20.832576 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wvz6n"]
Oct 02 22:47:21 crc kubenswrapper[4636]: I1002 22:47:21.632140 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wvz6n" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="registry-server" containerID="cri-o://e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b" gracePeriod=2
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.160432 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wvz6n"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.326204 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-catalog-content\") pod \"ff822565-4d8f-4b72-86dd-6b204f083b58\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") "
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.326335 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-utilities\") pod \"ff822565-4d8f-4b72-86dd-6b204f083b58\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") "
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.326510 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvztw\" (UniqueName: \"kubernetes.io/projected/ff822565-4d8f-4b72-86dd-6b204f083b58-kube-api-access-kvztw\") pod \"ff822565-4d8f-4b72-86dd-6b204f083b58\" (UID: \"ff822565-4d8f-4b72-86dd-6b204f083b58\") "
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.329057 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-utilities" (OuterVolumeSpecName: "utilities") pod "ff822565-4d8f-4b72-86dd-6b204f083b58" (UID: "ff822565-4d8f-4b72-86dd-6b204f083b58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.336334 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff822565-4d8f-4b72-86dd-6b204f083b58-kube-api-access-kvztw" (OuterVolumeSpecName: "kube-api-access-kvztw") pod "ff822565-4d8f-4b72-86dd-6b204f083b58" (UID: "ff822565-4d8f-4b72-86dd-6b204f083b58"). InnerVolumeSpecName "kube-api-access-kvztw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.403366 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff822565-4d8f-4b72-86dd-6b204f083b58" (UID: "ff822565-4d8f-4b72-86dd-6b204f083b58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.429094 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.429127 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff822565-4d8f-4b72-86dd-6b204f083b58-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.429136 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvztw\" (UniqueName: \"kubernetes.io/projected/ff822565-4d8f-4b72-86dd-6b204f083b58-kube-api-access-kvztw\") on node \"crc\" DevicePath \"\""
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.641263 4636 generic.go:334] "Generic (PLEG): container finished" podID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerID="e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b" exitCode=0
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.641323 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerDied","Data":"e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b"}
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.641357 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wvz6n" event={"ID":"ff822565-4d8f-4b72-86dd-6b204f083b58","Type":"ContainerDied","Data":"c8cbf06868a840d21c7972e253b1ef0948891bd4caadd666a18f4dec746a98c5"}
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.641358 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wvz6n"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.641373 4636 scope.go:117] "RemoveContainer" containerID="e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.661421 4636 scope.go:117] "RemoveContainer" containerID="de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.683498 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wvz6n"]
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.688103 4636 scope.go:117] "RemoveContainer" containerID="3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.691477 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wvz6n"]
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.704357 4636 scope.go:117] "RemoveContainer" containerID="e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b"
Oct 02 22:47:22 crc kubenswrapper[4636]: E1002 22:47:22.704719 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b\": container with ID starting with e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b not found: ID does not exist" containerID="e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.704760 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b"} err="failed to get container status \"e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b\": rpc error: code = NotFound desc = could not find container \"e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b\": container with ID starting with e5ebfbde38863e0fa07f99933195a27b41aacbaf06d5e980ab03e48ab92a459b not found: ID does not exist"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.704780 4636 scope.go:117] "RemoveContainer" containerID="de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87"
Oct 02 22:47:22 crc kubenswrapper[4636]: E1002 22:47:22.704941 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87\": container with ID starting with de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87 not found: ID does not exist" containerID="de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.704962 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87"} err="failed to get container status \"de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87\": rpc error: code = NotFound desc = could not find container \"de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87\": container with ID starting with de91d9556b331966ae0efcd14d68b8472280cc3b05af29a2d5f0d683d226de87 not found: ID does not exist"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.704975 4636 scope.go:117] "RemoveContainer" containerID="3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede"
Oct 02 22:47:22 crc kubenswrapper[4636]: E1002 22:47:22.705234 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede\": container with ID starting with 3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede not found: ID does not exist" containerID="3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede"
Oct 02 22:47:22 crc kubenswrapper[4636]: I1002 22:47:22.705257 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede"} err="failed to get container status \"3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede\": rpc error: code = NotFound desc = could not find container \"3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede\": container with ID starting with 3775b853392227b1fff11f72c8daa3aa31403dd50e173016d99478ea2c552ede not found: ID does not exist"
Oct 02 22:47:23 crc kubenswrapper[4636]: I1002 22:47:23.624279 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" path="/var/lib/kubelet/pods/ff822565-4d8f-4b72-86dd-6b204f083b58/volumes"
Oct 02 22:47:33 crc kubenswrapper[4636]: I1002 22:47:33.605157 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:47:33 crc kubenswrapper[4636]: E1002 22:47:33.605912 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.103660 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nft6k"]
Oct 02 22:47:36 crc kubenswrapper[4636]: E1002 22:47:36.105391 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="extract-content"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.105467 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="extract-content"
Oct 02 22:47:36 crc kubenswrapper[4636]: E1002 22:47:36.105523 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="extract-utilities"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.105582 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="extract-utilities"
Oct 02 22:47:36 crc kubenswrapper[4636]: E1002 22:47:36.105676 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="registry-server"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.105731 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="registry-server"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.106005 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff822565-4d8f-4b72-86dd-6b204f083b58" containerName="registry-server"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.115089 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.123019 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nft6k"]
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.240092 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxwp4\" (UniqueName: \"kubernetes.io/projected/2ea653f7-670a-4189-8e20-ab489215f79b-kube-api-access-rxwp4\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.240322 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-utilities\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.240466 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-catalog-content\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.342099 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-utilities\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.342461 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-catalog-content\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.342613 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-utilities\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.342860 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-catalog-content\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.342998 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxwp4\" (UniqueName: \"kubernetes.io/projected/2ea653f7-670a-4189-8e20-ab489215f79b-kube-api-access-rxwp4\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.375879 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxwp4\" (UniqueName: \"kubernetes.io/projected/2ea653f7-670a-4189-8e20-ab489215f79b-kube-api-access-rxwp4\") pod \"community-operators-nft6k\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") " pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:36 crc kubenswrapper[4636]: I1002 22:47:36.466021 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:37 crc kubenswrapper[4636]: I1002 22:47:37.113377 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nft6k"]
Oct 02 22:47:37 crc kubenswrapper[4636]: I1002 22:47:37.777924 4636 generic.go:334] "Generic (PLEG): container finished" podID="2ea653f7-670a-4189-8e20-ab489215f79b" containerID="e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80" exitCode=0
Oct 02 22:47:37 crc kubenswrapper[4636]: I1002 22:47:37.777967 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerDied","Data":"e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80"}
Oct 02 22:47:37 crc kubenswrapper[4636]: I1002 22:47:37.777993 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerStarted","Data":"3595f17483824a3bac0a2011feea9fb930596968da920c1c83983186bfc8ac31"}
Oct 02 22:47:39 crc kubenswrapper[4636]: I1002 22:47:39.794433 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerStarted","Data":"32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f"}
Oct 02 22:47:40 crc kubenswrapper[4636]: I1002 22:47:40.803707 4636 generic.go:334] "Generic (PLEG): container finished" podID="2ea653f7-670a-4189-8e20-ab489215f79b" containerID="32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f" exitCode=0
Oct 02 22:47:40 crc kubenswrapper[4636]: I1002 22:47:40.804201 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerDied","Data":"32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f"}
Oct 02 22:47:42 crc kubenswrapper[4636]: I1002 22:47:42.829661 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerStarted","Data":"aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88"}
Oct 02 22:47:42 crc kubenswrapper[4636]: I1002 22:47:42.851524 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nft6k" podStartSLOduration=3.18804088 podStartE2EDuration="6.851499189s" podCreationTimestamp="2025-10-02 22:47:36 +0000 UTC" firstStartedPulling="2025-10-02 22:47:37.78257523 +0000 UTC m=+5049.105583249" lastFinishedPulling="2025-10-02 22:47:41.446033549 +0000 UTC m=+5052.769041558" observedRunningTime="2025-10-02 22:47:42.846118816 +0000 UTC m=+5054.169126835" watchObservedRunningTime="2025-10-02 22:47:42.851499189 +0000 UTC m=+5054.174507248"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.496205 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bfcwq"]
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.498607 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.509450 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfcwq"]
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.586437 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-catalog-content\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.586538 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-utilities\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.586612 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9c2n\" (UniqueName: \"kubernetes.io/projected/3e457934-0ea0-48f3-8a69-dab629fc1fea-kube-api-access-h9c2n\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.688187 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-utilities\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.688325 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9c2n\" (UniqueName: \"kubernetes.io/projected/3e457934-0ea0-48f3-8a69-dab629fc1fea-kube-api-access-h9c2n\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.688421 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-catalog-content\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.688827 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-utilities\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.688981 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-catalog-content\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.710949 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9c2n\" (UniqueName: \"kubernetes.io/projected/3e457934-0ea0-48f3-8a69-dab629fc1fea-kube-api-access-h9c2n\") pod \"redhat-marketplace-bfcwq\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.858481 4636 generic.go:334] "Generic (PLEG): container finished" podID="731b6858-850b-434d-b249-6ca0836def25" containerID="be9fc0b6de7cfd4084897d526469bf5ad509c5f01f68361ef3d844da90f20e91" exitCode=0
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.858527 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb" event={"ID":"731b6858-850b-434d-b249-6ca0836def25","Type":"ContainerDied","Data":"be9fc0b6de7cfd4084897d526469bf5ad509c5f01f68361ef3d844da90f20e91"}
Oct 02 22:47:44 crc kubenswrapper[4636]: I1002 22:47:44.874621 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfcwq"
Oct 02 22:47:45 crc kubenswrapper[4636]: I1002 22:47:45.366919 4636 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfcwq"]
Oct 02 22:47:45 crc kubenswrapper[4636]: I1002 22:47:45.868617 4636 generic.go:334] "Generic (PLEG): container finished" podID="3e457934-0ea0-48f3-8a69-dab629fc1fea" containerID="f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6" exitCode=0
Oct 02 22:47:45 crc kubenswrapper[4636]: I1002 22:47:45.868699 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerDied","Data":"f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6"}
Oct 02 22:47:45 crc kubenswrapper[4636]: I1002 22:47:45.868957 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerStarted","Data":"aa0f8cdd5f9234074848c5a4b4e5a63c9562b323fee6460a2e72309b34c29444"}
Oct 02 22:47:45 crc kubenswrapper[4636]: I1002 22:47:45.972897 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb"
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.013397 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-z4jqb"]
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.053301 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-z4jqb"]
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.112363 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpm5q\" (UniqueName: \"kubernetes.io/projected/731b6858-850b-434d-b249-6ca0836def25-kube-api-access-zpm5q\") pod \"731b6858-850b-434d-b249-6ca0836def25\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") "
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.112729 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/731b6858-850b-434d-b249-6ca0836def25-host\") pod \"731b6858-850b-434d-b249-6ca0836def25\" (UID: \"731b6858-850b-434d-b249-6ca0836def25\") "
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.113087 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/731b6858-850b-434d-b249-6ca0836def25-host" (OuterVolumeSpecName: "host") pod "731b6858-850b-434d-b249-6ca0836def25" (UID: "731b6858-850b-434d-b249-6ca0836def25"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.113625 4636 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/731b6858-850b-434d-b249-6ca0836def25-host\") on node \"crc\" DevicePath \"\""
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.128548 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/731b6858-850b-434d-b249-6ca0836def25-kube-api-access-zpm5q" (OuterVolumeSpecName: "kube-api-access-zpm5q") pod "731b6858-850b-434d-b249-6ca0836def25" (UID: "731b6858-850b-434d-b249-6ca0836def25"). InnerVolumeSpecName "kube-api-access-zpm5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.215387 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpm5q\" (UniqueName: \"kubernetes.io/projected/731b6858-850b-434d-b249-6ca0836def25-kube-api-access-zpm5q\") on node \"crc\" DevicePath \"\""
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.466835 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.466907 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.512725 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.880366 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d9cadef7cff2e2460df445aecad067ec0a952f7ed9f2b1fed0d1bde1ebddae1"
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.880720 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-z4jqb"
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.888898 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerStarted","Data":"9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432"}
Oct 02 22:47:46 crc kubenswrapper[4636]: I1002 22:47:46.941010 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.183988 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-k9fsb"]
Oct 02 22:47:47 crc kubenswrapper[4636]: E1002 22:47:47.184443 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="731b6858-850b-434d-b249-6ca0836def25" containerName="container-00"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.184459 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="731b6858-850b-434d-b249-6ca0836def25" containerName="container-00"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.184638 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="731b6858-850b-434d-b249-6ca0836def25" containerName="container-00"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.185262 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.336150 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkb9c\" (UniqueName: \"kubernetes.io/projected/70562e9c-3e99-475a-bc00-0079e3e4d7ce-kube-api-access-pkb9c\") pod \"crc-debug-k9fsb\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.336284 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/70562e9c-3e99-475a-bc00-0079e3e4d7ce-host\") pod \"crc-debug-k9fsb\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.438299 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkb9c\" (UniqueName: \"kubernetes.io/projected/70562e9c-3e99-475a-bc00-0079e3e4d7ce-kube-api-access-pkb9c\") pod \"crc-debug-k9fsb\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.438393 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/70562e9c-3e99-475a-bc00-0079e3e4d7ce-host\") pod \"crc-debug-k9fsb\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.438577 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/70562e9c-3e99-475a-bc00-0079e3e4d7ce-host\") pod \"crc-debug-k9fsb\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.466499 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkb9c\" (UniqueName: \"kubernetes.io/projected/70562e9c-3e99-475a-bc00-0079e3e4d7ce-kube-api-access-pkb9c\") pod \"crc-debug-k9fsb\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.535940 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb"
Oct 02 22:47:47 crc kubenswrapper[4636]: W1002 22:47:47.591391 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70562e9c_3e99_475a_bc00_0079e3e4d7ce.slice/crio-699b40d45f7d45b1fbe67933a7505d6496f622c4bc72f2c35330857833196e0b WatchSource:0}: Error finding container 699b40d45f7d45b1fbe67933a7505d6496f622c4bc72f2c35330857833196e0b: Status 404 returned error can't find the container with id 699b40d45f7d45b1fbe67933a7505d6496f622c4bc72f2c35330857833196e0b
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.604484 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:47:47 crc kubenswrapper[4636]: E1002 22:47:47.604903 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.621810 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="731b6858-850b-434d-b249-6ca0836def25" path="/var/lib/kubelet/pods/731b6858-850b-434d-b249-6ca0836def25/volumes"
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.677677 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nft6k"]
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.896429 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" event={"ID":"70562e9c-3e99-475a-bc00-0079e3e4d7ce","Type":"ContainerStarted","Data":"89790e68db690bb3671809b9ad88daa37998ad2c55e97b52a0d6bdb6821c8add"}
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.897150 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" event={"ID":"70562e9c-3e99-475a-bc00-0079e3e4d7ce","Type":"ContainerStarted","Data":"699b40d45f7d45b1fbe67933a7505d6496f622c4bc72f2c35330857833196e0b"}
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.898221 4636 generic.go:334] "Generic (PLEG): container finished" podID="3e457934-0ea0-48f3-8a69-dab629fc1fea" containerID="9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432" exitCode=0
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.898281 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerDied","Data":"9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432"}
Oct 02 22:47:47 crc kubenswrapper[4636]: I1002 22:47:47.915515 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" podStartSLOduration=0.915496787 podStartE2EDuration="915.496787ms" podCreationTimestamp="2025-10-02 22:47:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 22:47:47.911393808 +0000 UTC m=+5059.234401827" watchObservedRunningTime="2025-10-02 22:47:47.915496787 +0000 UTC m=+5059.238504806"
Oct 02 22:47:48 crc kubenswrapper[4636]: I1002 22:47:48.906115 4636 generic.go:334] "Generic (PLEG): container finished" podID="70562e9c-3e99-475a-bc00-0079e3e4d7ce" containerID="89790e68db690bb3671809b9ad88daa37998ad2c55e97b52a0d6bdb6821c8add" exitCode=0
Oct 02 22:47:48 crc kubenswrapper[4636]: I1002 22:47:48.906150 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" event={"ID":"70562e9c-3e99-475a-bc00-0079e3e4d7ce","Type":"ContainerDied","Data":"89790e68db690bb3671809b9ad88daa37998ad2c55e97b52a0d6bdb6821c8add"}
Oct 02 22:47:48 crc kubenswrapper[4636]: I1002 22:47:48.916835 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nft6k" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="registry-server" containerID="cri-o://aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88" gracePeriod=2
Oct 02 22:47:48 crc kubenswrapper[4636]: I1002 22:47:48.917162 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerStarted","Data":"980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19"}
Oct 02 22:47:48 crc kubenswrapper[4636]: I1002 22:47:48.949695 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bfcwq" podStartSLOduration=2.4999567799999998 podStartE2EDuration="4.949676055s" podCreationTimestamp="2025-10-02 22:47:44 +0000 UTC" firstStartedPulling="2025-10-02 22:47:45.873707368 +0000 UTC m=+5057.196715387" lastFinishedPulling="2025-10-02 22:47:48.323426653 +0000 UTC m=+5059.646434662" observedRunningTime="2025-10-02 22:47:48.946684896 +0000 UTC m=+5060.269692915" watchObservedRunningTime="2025-10-02 22:47:48.949676055 +0000 UTC m=+5060.272684074"
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.630674 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nft6k"
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.793330 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-utilities\") pod \"2ea653f7-670a-4189-8e20-ab489215f79b\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") "
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.793499 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-catalog-content\") pod \"2ea653f7-670a-4189-8e20-ab489215f79b\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") "
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.793537 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxwp4\" (UniqueName: \"kubernetes.io/projected/2ea653f7-670a-4189-8e20-ab489215f79b-kube-api-access-rxwp4\") pod \"2ea653f7-670a-4189-8e20-ab489215f79b\" (UID: \"2ea653f7-670a-4189-8e20-ab489215f79b\") "
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.794164 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-utilities" (OuterVolumeSpecName: "utilities") pod "2ea653f7-670a-4189-8e20-ab489215f79b" (UID: "2ea653f7-670a-4189-8e20-ab489215f79b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.794312 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-utilities\") on node \"crc\" DevicePath \"\""
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.810324 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea653f7-670a-4189-8e20-ab489215f79b-kube-api-access-rxwp4" (OuterVolumeSpecName: "kube-api-access-rxwp4") pod "2ea653f7-670a-4189-8e20-ab489215f79b" (UID: "2ea653f7-670a-4189-8e20-ab489215f79b"). InnerVolumeSpecName "kube-api-access-rxwp4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.839914 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ea653f7-670a-4189-8e20-ab489215f79b" (UID: "2ea653f7-670a-4189-8e20-ab489215f79b"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.896531 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ea653f7-670a-4189-8e20-ab489215f79b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.896737 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxwp4\" (UniqueName: \"kubernetes.io/projected/2ea653f7-670a-4189-8e20-ab489215f79b-kube-api-access-rxwp4\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.926233 4636 generic.go:334] "Generic (PLEG): container finished" podID="2ea653f7-670a-4189-8e20-ab489215f79b" containerID="aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88" exitCode=0 Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.926266 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nft6k" Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.926288 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerDied","Data":"aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88"} Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.926322 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nft6k" event={"ID":"2ea653f7-670a-4189-8e20-ab489215f79b","Type":"ContainerDied","Data":"3595f17483824a3bac0a2011feea9fb930596968da920c1c83983186bfc8ac31"} Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.926340 4636 scope.go:117] "RemoveContainer" containerID="aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88" Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.982108 4636 scope.go:117] "RemoveContainer" containerID="32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f" Oct 02 22:47:49 crc kubenswrapper[4636]: I1002 22:47:49.985056 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.006271 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nft6k"] Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.010636 4636 scope.go:117] "RemoveContainer" containerID="e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.016161 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nft6k"] Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.041495 4636 scope.go:117] "RemoveContainer" containerID="aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88" Oct 02 22:47:50 crc kubenswrapper[4636]: E1002 22:47:50.042789 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88\": container with ID starting with aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88 not found: ID does not exist" containerID="aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.042835 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88"} err="failed to get container status \"aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88\": rpc error: code = NotFound desc = could not find container \"aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88\": container with ID starting with aa9c77d218b43b8b333e2f5077218170ba17da7f7cafcb2c7b1c48d977c48e88 not found: ID does not exist" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.042886 4636 scope.go:117] "RemoveContainer" containerID="32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f" Oct 02 22:47:50 crc kubenswrapper[4636]: E1002 22:47:50.043113 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f\": container with ID starting with 32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f not found: ID does not exist" containerID="32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.043132 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f"} err="failed to get container status \"32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f\": rpc error: code = NotFound desc = could not find container \"32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f\": container with ID starting with 32e7327f355c56b255f30b0c445f1b27d9c16f977a288898005e3c96f09d228f not found: ID does not exist" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.043144 4636 scope.go:117] "RemoveContainer" containerID="e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80" Oct 02 22:47:50 crc kubenswrapper[4636]: E1002 22:47:50.043286 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80\": container with ID starting with 
e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80 not found: ID does not exist" containerID="e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.043307 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80"} err="failed to get container status \"e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80\": rpc error: code = NotFound desc = could not find container \"e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80\": container with ID starting with e9b4d22332497f23b1e149d5c2819774c2e4a5f43854d3fa51361f71f645bc80 not found: ID does not exist" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.099155 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkb9c\" (UniqueName: \"kubernetes.io/projected/70562e9c-3e99-475a-bc00-0079e3e4d7ce-kube-api-access-pkb9c\") pod \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.099215 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/70562e9c-3e99-475a-bc00-0079e3e4d7ce-host\") pod \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\" (UID: \"70562e9c-3e99-475a-bc00-0079e3e4d7ce\") " Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.099266 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/70562e9c-3e99-475a-bc00-0079e3e4d7ce-host" (OuterVolumeSpecName: "host") pod "70562e9c-3e99-475a-bc00-0079e3e4d7ce" (UID: "70562e9c-3e99-475a-bc00-0079e3e4d7ce"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.099929 4636 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/70562e9c-3e99-475a-bc00-0079e3e4d7ce-host\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.102480 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70562e9c-3e99-475a-bc00-0079e3e4d7ce-kube-api-access-pkb9c" (OuterVolumeSpecName: "kube-api-access-pkb9c") pod "70562e9c-3e99-475a-bc00-0079e3e4d7ce" (UID: "70562e9c-3e99-475a-bc00-0079e3e4d7ce"). InnerVolumeSpecName "kube-api-access-pkb9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.201114 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkb9c\" (UniqueName: \"kubernetes.io/projected/70562e9c-3e99-475a-bc00-0079e3e4d7ce-kube-api-access-pkb9c\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.936043 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.936084 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-k9fsb" event={"ID":"70562e9c-3e99-475a-bc00-0079e3e4d7ce","Type":"ContainerDied","Data":"699b40d45f7d45b1fbe67933a7505d6496f622c4bc72f2c35330857833196e0b"} Oct 02 22:47:50 crc kubenswrapper[4636]: I1002 22:47:50.936500 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="699b40d45f7d45b1fbe67933a7505d6496f622c4bc72f2c35330857833196e0b" Oct 02 22:47:51 crc kubenswrapper[4636]: I1002 22:47:51.616661 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" path="/var/lib/kubelet/pods/2ea653f7-670a-4189-8e20-ab489215f79b/volumes" Oct 02 22:47:54 crc kubenswrapper[4636]: I1002 22:47:54.874766 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bfcwq" Oct 02 22:47:54 crc kubenswrapper[4636]: I1002 22:47:54.876333 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bfcwq" Oct 02 22:47:54 crc kubenswrapper[4636]: I1002 22:47:54.928090 4636 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bfcwq" Oct 02 22:47:55 crc kubenswrapper[4636]: I1002 22:47:55.039354 4636 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bfcwq" Oct 02 22:47:55 crc kubenswrapper[4636]: I1002 22:47:55.174270 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfcwq"] Oct 02 22:47:55 crc kubenswrapper[4636]: I1002 22:47:55.928050 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-k9fsb"] Oct 02 22:47:55 crc kubenswrapper[4636]: I1002 22:47:55.934925 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-k9fsb"] Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.014657 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bfcwq" podUID="3e457934-0ea0-48f3-8a69-dab629fc1fea" containerName="registry-server" containerID="cri-o://980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19" gracePeriod=2 Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.096725 4636 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-hbhdr"] Oct 02 22:47:57 crc kubenswrapper[4636]: E1002 22:47:57.097098 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="extract-utilities" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097116 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="extract-utilities" Oct 02 22:47:57 crc kubenswrapper[4636]: E1002 22:47:57.097127 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="registry-server" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097134 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="registry-server" Oct 02 22:47:57 crc kubenswrapper[4636]: E1002 22:47:57.097163 4636 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="extract-content" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097169 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="extract-content" Oct 02 22:47:57 crc kubenswrapper[4636]: E1002 22:47:57.097183 4636 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70562e9c-3e99-475a-bc00-0079e3e4d7ce" containerName="container-00" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097188 4636 state_mem.go:107] "Deleted CPUSet assignment" podUID="70562e9c-3e99-475a-bc00-0079e3e4d7ce" containerName="container-00" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097378 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="70562e9c-3e99-475a-bc00-0079e3e4d7ce" containerName="container-00" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097393 4636 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea653f7-670a-4189-8e20-ab489215f79b" containerName="registry-server" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.097964 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.236214 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr74h\" (UniqueName: \"kubernetes.io/projected/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-kube-api-access-cr74h\") pod \"crc-debug-hbhdr\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.236608 4636 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-host\") pod \"crc-debug-hbhdr\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.337847 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr74h\" (UniqueName: \"kubernetes.io/projected/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-kube-api-access-cr74h\") pod \"crc-debug-hbhdr\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.337951 4636 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-host\") pod \"crc-debug-hbhdr\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.338060 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-host\") pod \"crc-debug-hbhdr\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.360919 4636 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr74h\" (UniqueName: \"kubernetes.io/projected/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-kube-api-access-cr74h\") pod \"crc-debug-hbhdr\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " 
pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.416688 4636 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:47:57 crc kubenswrapper[4636]: W1002 22:47:57.444436 4636 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec8a4562_9af7_42d7_ae90_7d89dabd1acb.slice/crio-f3d164af069ce7d880711dd176c7bdac3b8adfa9da568e4a6ed0e403453ac147 WatchSource:0}: Error finding container f3d164af069ce7d880711dd176c7bdac3b8adfa9da568e4a6ed0e403453ac147: Status 404 returned error can't find the container with id f3d164af069ce7d880711dd176c7bdac3b8adfa9da568e4a6ed0e403453ac147 Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.497992 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfcwq" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.613866 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70562e9c-3e99-475a-bc00-0079e3e4d7ce" path="/var/lib/kubelet/pods/70562e9c-3e99-475a-bc00-0079e3e4d7ce/volumes" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.642792 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9c2n\" (UniqueName: \"kubernetes.io/projected/3e457934-0ea0-48f3-8a69-dab629fc1fea-kube-api-access-h9c2n\") pod \"3e457934-0ea0-48f3-8a69-dab629fc1fea\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.642861 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-catalog-content\") pod \"3e457934-0ea0-48f3-8a69-dab629fc1fea\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.642896 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-utilities\") pod \"3e457934-0ea0-48f3-8a69-dab629fc1fea\" (UID: \"3e457934-0ea0-48f3-8a69-dab629fc1fea\") " Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.643814 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-utilities" (OuterVolumeSpecName: "utilities") pod "3e457934-0ea0-48f3-8a69-dab629fc1fea" (UID: "3e457934-0ea0-48f3-8a69-dab629fc1fea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.648987 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e457934-0ea0-48f3-8a69-dab629fc1fea-kube-api-access-h9c2n" (OuterVolumeSpecName: "kube-api-access-h9c2n") pod "3e457934-0ea0-48f3-8a69-dab629fc1fea" (UID: "3e457934-0ea0-48f3-8a69-dab629fc1fea"). InnerVolumeSpecName "kube-api-access-h9c2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.656894 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e457934-0ea0-48f3-8a69-dab629fc1fea" (UID: "3e457934-0ea0-48f3-8a69-dab629fc1fea"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.745005 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9c2n\" (UniqueName: \"kubernetes.io/projected/3e457934-0ea0-48f3-8a69-dab629fc1fea-kube-api-access-h9c2n\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.745039 4636 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:57 crc kubenswrapper[4636]: I1002 22:47:57.745051 4636 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e457934-0ea0-48f3-8a69-dab629fc1fea-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.023458 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" event={"ID":"ec8a4562-9af7-42d7-ae90-7d89dabd1acb","Type":"ContainerStarted","Data":"d72ffee355f21b17617a6ac89061980a23386a3f708171d52b3a3bf12f1b9002"} Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.023594 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" event={"ID":"ec8a4562-9af7-42d7-ae90-7d89dabd1acb","Type":"ContainerStarted","Data":"f3d164af069ce7d880711dd176c7bdac3b8adfa9da568e4a6ed0e403453ac147"} Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.026838 4636 generic.go:334] "Generic (PLEG): container finished" podID="3e457934-0ea0-48f3-8a69-dab629fc1fea" containerID="980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19" exitCode=0 Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.026881 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerDied","Data":"980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19"} Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.026917 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfcwq" event={"ID":"3e457934-0ea0-48f3-8a69-dab629fc1fea","Type":"ContainerDied","Data":"aa0f8cdd5f9234074848c5a4b4e5a63c9562b323fee6460a2e72309b34c29444"} Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.026918 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfcwq" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.026992 4636 scope.go:117] "RemoveContainer" containerID="980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.078394 4636 scope.go:117] "RemoveContainer" containerID="9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.083325 4636 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" podStartSLOduration=1.083307491 podStartE2EDuration="1.083307491s" podCreationTimestamp="2025-10-02 22:47:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 22:47:58.039004323 +0000 UTC m=+5069.362012332" watchObservedRunningTime="2025-10-02 22:47:58.083307491 +0000 UTC m=+5069.406315510" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.100063 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfcwq"] Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.108589 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfcwq"] Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.167080 4636 scope.go:117] "RemoveContainer" containerID="f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.359323 4636 scope.go:117] "RemoveContainer" containerID="980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19" Oct 02 22:47:58 crc kubenswrapper[4636]: E1002 22:47:58.360673 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19\": container with ID starting with 980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19 not found: ID does not exist" containerID="980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.360799 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19"} err="failed to get container status \"980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19\": rpc error: code = NotFound desc = could not find container \"980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19\": container with ID starting with 980586d86cb96a32f0545401eb023c084996a54177c7ef3dedf6f89d88203f19 not found: ID does not exist" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.360895 4636 scope.go:117] "RemoveContainer" containerID="9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432" Oct 02 22:47:58 crc kubenswrapper[4636]: E1002 22:47:58.361211 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432\": container with ID starting with 9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432 not found: ID does not exist" containerID="9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.361299 4636 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432"} err="failed to get container status \"9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432\": rpc error: code = NotFound desc = could not find container \"9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432\": container with ID starting with 9bf553b4b4828f99c782337769c594c11ae467377fe30a259c1624775b652432 not found: ID does not exist" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.361385 4636 scope.go:117] "RemoveContainer" containerID="f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6" Oct 02 22:47:58 crc kubenswrapper[4636]: E1002 22:47:58.361640 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6\": container with ID starting with f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6 not found: ID does not exist" containerID="f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6" Oct 02 22:47:58 crc kubenswrapper[4636]: I1002 22:47:58.361727 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6"} err="failed to get container status \"f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6\": rpc error: code = NotFound desc = could not find container \"f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6\": container with ID starting with f0eefd1b321f1c915e7f64088d9553ab67047fab95ed8ebafcce5b0d0877a4e6 not found: ID does not exist" Oct 02 22:47:59 crc kubenswrapper[4636]: I1002 22:47:59.041853 4636 generic.go:334] "Generic (PLEG): container finished" podID="ec8a4562-9af7-42d7-ae90-7d89dabd1acb" containerID="d72ffee355f21b17617a6ac89061980a23386a3f708171d52b3a3bf12f1b9002" exitCode=0 Oct 02 22:47:59 crc kubenswrapper[4636]: I1002 22:47:59.041975 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" event={"ID":"ec8a4562-9af7-42d7-ae90-7d89dabd1acb","Type":"ContainerDied","Data":"d72ffee355f21b17617a6ac89061980a23386a3f708171d52b3a3bf12f1b9002"} Oct 02 22:47:59 crc kubenswrapper[4636]: I1002 22:47:59.614519 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:47:59 crc kubenswrapper[4636]: E1002 22:47:59.615054 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:47:59 crc kubenswrapper[4636]: I1002 22:47:59.621835 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e457934-0ea0-48f3-8a69-dab629fc1fea" path="/var/lib/kubelet/pods/3e457934-0ea0-48f3-8a69-dab629fc1fea/volumes" Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.493719 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.532781 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-hbhdr"] Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.544680 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zc6x/crc-debug-hbhdr"] Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.609938 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-host\") pod \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.610052 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr74h\" (UniqueName: \"kubernetes.io/projected/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-kube-api-access-cr74h\") pod \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\" (UID: \"ec8a4562-9af7-42d7-ae90-7d89dabd1acb\") " Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.611610 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-host" (OuterVolumeSpecName: "host") pod "ec8a4562-9af7-42d7-ae90-7d89dabd1acb" (UID: "ec8a4562-9af7-42d7-ae90-7d89dabd1acb"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.658960 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-kube-api-access-cr74h" (OuterVolumeSpecName: "kube-api-access-cr74h") pod "ec8a4562-9af7-42d7-ae90-7d89dabd1acb" (UID: "ec8a4562-9af7-42d7-ae90-7d89dabd1acb"). InnerVolumeSpecName "kube-api-access-cr74h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.712305 4636 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-host\") on node \"crc\" DevicePath \"\"" Oct 02 22:48:00 crc kubenswrapper[4636]: I1002 22:48:00.712347 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr74h\" (UniqueName: \"kubernetes.io/projected/ec8a4562-9af7-42d7-ae90-7d89dabd1acb-kube-api-access-cr74h\") on node \"crc\" DevicePath \"\"" Oct 02 22:48:01 crc kubenswrapper[4636]: I1002 22:48:01.064399 4636 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3d164af069ce7d880711dd176c7bdac3b8adfa9da568e4a6ed0e403453ac147" Oct 02 22:48:01 crc kubenswrapper[4636]: I1002 22:48:01.064457 4636 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zc6x/crc-debug-hbhdr" Oct 02 22:48:01 crc kubenswrapper[4636]: I1002 22:48:01.614084 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec8a4562-9af7-42d7-ae90-7d89dabd1acb" path="/var/lib/kubelet/pods/ec8a4562-9af7-42d7-ae90-7d89dabd1acb/volumes" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.377925 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/util/0.log" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.578116 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/util/0.log" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.579839 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/pull/0.log" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.623538 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/pull/0.log" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.777442 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/util/0.log" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.838182 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/pull/0.log" Oct 02 22:48:02 crc kubenswrapper[4636]: I1002 22:48:02.854531 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62fcb4659743ae26c132ec5e9fb5a583684c3dacd258324b84866c9c2agr96n_bbe6de43-e02c-4c6a-87aa-226f2385afc6/extract/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.066881 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-b52pz_b29f844c-ab64-4edb-9dee-0d19e4cb78d0/manager/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.091053 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-b52pz_b29f844c-ab64-4edb-9dee-0d19e4cb78d0/kube-rbac-proxy/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.166285 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-l889h_26738e55-423c-4cab-821d-b9abb0d8a026/kube-rbac-proxy/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.307233 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-l889h_26738e55-423c-4cab-821d-b9abb0d8a026/manager/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.356998 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-r4gzg_330ebc0e-6515-4a61-9d41-43f11247c659/kube-rbac-proxy/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.410570 4636 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-r4gzg_330ebc0e-6515-4a61-9d41-43f11247c659/manager/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.566352 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-lc2st_405ad6d4-7f81-48b8-a8a9-ae1d8062f078/kube-rbac-proxy/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.652957 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-lc2st_405ad6d4-7f81-48b8-a8a9-ae1d8062f078/manager/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.785108 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-7ntw8_28a9b36b-5645-4d73-883a-87028af9455f/kube-rbac-proxy/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.857496 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-7ntw8_28a9b36b-5645-4d73-883a-87028af9455f/manager/0.log" Oct 02 22:48:03 crc kubenswrapper[4636]: I1002 22:48:03.970921 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-d74cn_a10a56aa-5ebe-4f98-8f99-fcba515c254d/kube-rbac-proxy/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.073848 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-d74cn_a10a56aa-5ebe-4f98-8f99-fcba515c254d/manager/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.212125 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-7vrb4_994cd5bb-107d-4426-9549-f5805479b1d7/kube-rbac-proxy/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.361960 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-7vrb4_994cd5bb-107d-4426-9549-f5805479b1d7/manager/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.384313 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-dpx7v_a7b69bb7-2b74-4724-b459-215b7d515840/kube-rbac-proxy/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.467613 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-dpx7v_a7b69bb7-2b74-4724-b459-215b7d515840/manager/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.613280 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-ccnnb_a6a3569c-020b-4cf3-8895-8f31e98bae75/kube-rbac-proxy/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.673508 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-ccnnb_a6a3569c-020b-4cf3-8895-8f31e98bae75/manager/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.815851 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-h8kh7_1f1e85fe-02dd-423c-991d-fafb30119601/kube-rbac-proxy/0.log" Oct 02 22:48:04 crc kubenswrapper[4636]: I1002 22:48:04.995197 4636 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-j7mxj_a0b38373-c0c9-4fd9-ad1b-d7af382f370b/kube-rbac-proxy/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.007244 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-h8kh7_1f1e85fe-02dd-423c-991d-fafb30119601/manager/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.117696 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-j7mxj_a0b38373-c0c9-4fd9-ad1b-d7af382f370b/manager/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.224137 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-zjsn9_a50690d6-0e31-4944-9011-35076543304f/manager/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.268694 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-zjsn9_a50690d6-0e31-4944-9011-35076543304f/kube-rbac-proxy/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.423972 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-z4lx2_20dffa00-5117-4d3e-8b67-467357444816/kube-rbac-proxy/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.547508 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-z4lx2_20dffa00-5117-4d3e-8b67-467357444816/manager/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.716908 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-spqgz_8bf57ebc-7603-43a7-8bad-f52c3ad98ecf/manager/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.721460 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-spqgz_8bf57ebc-7603-43a7-8bad-f52c3ad98ecf/kube-rbac-proxy/0.log" Oct 02 22:48:05 crc kubenswrapper[4636]: I1002 22:48:05.822713 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9_b8281915-fca8-4081-9240-78d83b1fb453/kube-rbac-proxy/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 22:48:06.019294 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678cb9b9_b8281915-fca8-4081-9240-78d83b1fb453/manager/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 22:48:06.056512 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8598b6c48f-b7l8t_72e01c24-5096-468f-8a34-36db5383bf10/kube-rbac-proxy/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 22:48:06.277901 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69cfffd9c8-f6h7r_59487ce5-9c6e-41d8-9b9f-ee966b0579e4/kube-rbac-proxy/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 22:48:06.586625 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-c5h77_1c7d8548-5763-4dad-a80a-69027aef1f92/registry-server/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 
22:48:06.646337 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-4gd5n_46307289-bc17-4194-8ffa-e0cccfbb675c/kube-rbac-proxy/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 22:48:06.649979 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69cfffd9c8-f6h7r_59487ce5-9c6e-41d8-9b9f-ee966b0579e4/operator/0.log" Oct 02 22:48:06 crc kubenswrapper[4636]: I1002 22:48:06.936377 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-j7dds_8be5d41d-4585-496e-b2a1-1f6514902e5a/kube-rbac-proxy/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.022257 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-j7dds_8be5d41d-4585-496e-b2a1-1f6514902e5a/manager/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.032545 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-4gd5n_46307289-bc17-4194-8ffa-e0cccfbb675c/manager/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.216438 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-8598b6c48f-b7l8t_72e01c24-5096-468f-8a34-36db5383bf10/manager/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.224726 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-jv2bg_ccf677d0-a6b1-44ad-9d02-70fc42842c3a/operator/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.337724 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-bqhzt_c82f218c-6788-4558-b8b0-a375592d9377/kube-rbac-proxy/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.479149 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-j44cm_e439d19a-9b7e-4245-baa8-1fb4a6c1f56a/kube-rbac-proxy/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.524412 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-bqhzt_c82f218c-6788-4558-b8b0-a375592d9377/manager/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.596873 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-j44cm_e439d19a-9b7e-4245-baa8-1fb4a6c1f56a/manager/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.679665 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-g9jx5_31a7600e-116a-458a-9102-0e4207fcc73c/kube-rbac-proxy/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.686964 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-g9jx5_31a7600e-116a-458a-9102-0e4207fcc73c/manager/0.log" Oct 02 22:48:07 crc kubenswrapper[4636]: I1002 22:48:07.788836 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-pdhqs_0d6cbc2a-0f5b-439d-963e-936e0a6a843d/kube-rbac-proxy/0.log" Oct 02 22:48:08 crc 
kubenswrapper[4636]: I1002 22:48:08.391562 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-pdhqs_0d6cbc2a-0f5b-439d-963e-936e0a6a843d/manager/0.log" Oct 02 22:48:10 crc kubenswrapper[4636]: I1002 22:48:10.605430 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:48:10 crc kubenswrapper[4636]: E1002 22:48:10.608240 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:48:22 crc kubenswrapper[4636]: I1002 22:48:22.604767 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:48:22 crc kubenswrapper[4636]: E1002 22:48:22.605519 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:48:25 crc kubenswrapper[4636]: I1002 22:48:25.704641 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7t4m5_16abd9f8-9089-4ecd-8314-06a470d37b8a/control-plane-machine-set-operator/0.log" Oct 02 22:48:25 crc kubenswrapper[4636]: I1002 22:48:25.837093 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-84tsd_6252e8b9-5b10-4dbd-9f02-50e0a5e47233/kube-rbac-proxy/0.log" Oct 02 22:48:25 crc kubenswrapper[4636]: I1002 22:48:25.882608 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-84tsd_6252e8b9-5b10-4dbd-9f02-50e0a5e47233/machine-api-operator/0.log" Oct 02 22:48:33 crc kubenswrapper[4636]: I1002 22:48:33.605540 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:48:33 crc kubenswrapper[4636]: E1002 22:48:33.606455 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:48:39 crc kubenswrapper[4636]: I1002 22:48:39.087311 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-mkcl7_f62f25b1-27ac-4af1-b2bb-e267d068a038/cert-manager-controller/0.log" Oct 02 22:48:39 crc kubenswrapper[4636]: I1002 22:48:39.215539 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-c6gl4_48ac323f-d140-4cca-947b-e85f9028457e/cert-manager-cainjector/0.log" Oct 02 22:48:39 crc kubenswrapper[4636]: 
I1002 22:48:39.331986 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-lvpzq_4df03eaa-7663-483f-b06d-2332ee514879/cert-manager-webhook/0.log" Oct 02 22:48:47 crc kubenswrapper[4636]: I1002 22:48:47.603938 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:48:47 crc kubenswrapper[4636]: E1002 22:48:47.604789 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:48:52 crc kubenswrapper[4636]: I1002 22:48:52.628989 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-6c8fn_550f107b-cd3d-4c84-a0cb-edf8e0c62db8/nmstate-console-plugin/0.log" Oct 02 22:48:52 crc kubenswrapper[4636]: I1002 22:48:52.671464 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-snqg8_e03267b6-7c4a-419a-b291-aedff26bd214/nmstate-handler/0.log" Oct 02 22:48:52 crc kubenswrapper[4636]: I1002 22:48:52.888289 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-mfcf8_f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da/nmstate-metrics/0.log" Oct 02 22:48:52 crc kubenswrapper[4636]: I1002 22:48:52.892223 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-mfcf8_f01c7e1f-9050-4b45-9f9a-18ca3dc9b5da/kube-rbac-proxy/0.log" Oct 02 22:48:53 crc kubenswrapper[4636]: I1002 22:48:53.156953 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-nr6nl_28949b9c-5546-47a7-bbfe-8263cdc11841/nmstate-webhook/0.log" Oct 02 22:48:53 crc kubenswrapper[4636]: I1002 22:48:53.158687 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-6qt2c_51900dd3-88ff-4e2a-bda7-fa8350f5f2d7/nmstate-operator/0.log" Oct 02 22:49:01 crc kubenswrapper[4636]: I1002 22:49:01.604048 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:49:01 crc kubenswrapper[4636]: E1002 22:49:01.604788 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.120588 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-mlb7r_21241409-6577-4338-9478-d467bb5035fd/kube-rbac-proxy/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.215683 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-mlb7r_21241409-6577-4338-9478-d467bb5035fd/controller/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.347377 4636 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-frr-files/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.498528 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-reloader/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.501645 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-frr-files/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.520263 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-metrics/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.570174 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-reloader/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.900364 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-frr-files/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.926033 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-reloader/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.950934 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-metrics/0.log" Oct 02 22:49:09 crc kubenswrapper[4636]: I1002 22:49:09.996831 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-metrics/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.238396 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-frr-files/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.259389 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-metrics/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.260283 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/controller/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.302798 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/cp-reloader/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.457402 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/frr-metrics/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.534877 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/kube-rbac-proxy/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.538001 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/kube-rbac-proxy-frr/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 22:49:10.785337 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/reloader/0.log" Oct 02 22:49:10 crc kubenswrapper[4636]: I1002 
Oct 02 22:49:11 crc kubenswrapper[4636]: I1002 22:49:11.382473 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7d556f69bd-9fsdx_d29773fe-6fbb-4f8c-8bdf-64fedf0df1ad/manager/0.log"
Oct 02 22:49:11 crc kubenswrapper[4636]: I1002 22:49:11.634450 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-64bc94fc5b-pf5cc_4868bbba-0967-4af7-820a-ea4b90481964/webhook-server/0.log"
Oct 02 22:49:11 crc kubenswrapper[4636]: I1002 22:49:11.771725 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cjfrp_70229f69-0581-4a0f-9e52-a3bf04d33ddc/kube-rbac-proxy/0.log"
Oct 02 22:49:11 crc kubenswrapper[4636]: I1002 22:49:11.885810 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2ngwv_5fd8a5c3-fa7d-45d7-a3e5-4267087c7b15/frr/0.log"
Oct 02 22:49:12 crc kubenswrapper[4636]: I1002 22:49:12.742591 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cjfrp_70229f69-0581-4a0f-9e52-a3bf04d33ddc/speaker/0.log"
Oct 02 22:49:16 crc kubenswrapper[4636]: I1002 22:49:16.604585 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:49:16 crc kubenswrapper[4636]: E1002 22:49:16.605316 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.254719 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/util/0.log"
Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.481769 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/pull/0.log"
Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.490472 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/pull/0.log"
Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.505734 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/util/0.log"
Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.700156 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/pull/0.log"
Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.713641 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/extract/0.log"
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/extract/0.log" Oct 02 22:49:25 crc kubenswrapper[4636]: I1002 22:49:25.979662 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26pr7d_96db42c7-fb6d-4f65-8618-005ed4e4f8fc/util/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.146104 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/extract-utilities/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.351814 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/extract-content/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.370407 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/extract-utilities/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.440098 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/extract-content/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.585942 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/extract-content/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.640684 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/extract-utilities/0.log" Oct 02 22:49:26 crc kubenswrapper[4636]: I1002 22:49:26.885635 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/extract-utilities/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.198811 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/extract-utilities/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.237429 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t2h9w_52af42a7-eff9-490f-afed-2868aced615d/registry-server/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.255351 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/extract-content/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.264069 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/extract-content/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.419246 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/extract-content/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.442718 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/extract-utilities/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.604013 4636 
scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:49:27 crc kubenswrapper[4636]: E1002 22:49:27.604413 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.836272 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/util/0.log" Oct 02 22:49:27 crc kubenswrapper[4636]: I1002 22:49:27.980166 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/util/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.024664 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/pull/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.163071 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6twd6_07a99b11-82a1-4cc1-82cb-f8e4b256eab8/registry-server/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.183615 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/pull/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.344237 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/pull/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.364366 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/util/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.433368 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cwvpqx_65a4377d-c810-430b-ba89-cd89d5f68250/extract/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.564659 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xzwzk_da2029b8-9b9d-4692-9e88-5ab20a3582ad/marketplace-operator/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.647952 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/extract-utilities/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.835234 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/extract-utilities/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.861370 4636 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/extract-content/0.log" Oct 02 22:49:28 crc kubenswrapper[4636]: I1002 22:49:28.893404 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/extract-content/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.091718 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/extract-content/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.093918 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/extract-utilities/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.211006 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ddfq4_0a586e9c-7ecc-49a9-8a0d-8b686def8a59/registry-server/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.312572 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/extract-utilities/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.491023 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/extract-content/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.499371 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/extract-content/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.504083 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/extract-utilities/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.656526 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/extract-utilities/0.log" Oct 02 22:49:29 crc kubenswrapper[4636]: I1002 22:49:29.682539 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/extract-content/0.log" Oct 02 22:49:30 crc kubenswrapper[4636]: I1002 22:49:30.175452 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cklqw_db50ff88-f896-4e65-84cb-d7e02349ad30/registry-server/0.log" Oct 02 22:49:42 crc kubenswrapper[4636]: I1002 22:49:42.604370 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 22:49:42 crc kubenswrapper[4636]: E1002 22:49:42.605179 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" Oct 02 22:49:54 crc kubenswrapper[4636]: I1002 22:49:54.603620 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719" Oct 02 
Oct 02 22:50:07 crc kubenswrapper[4636]: I1002 22:50:07.603563 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:50:07 crc kubenswrapper[4636]: E1002 22:50:07.604341 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:50:19 crc kubenswrapper[4636]: I1002 22:50:19.618547 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:50:19 crc kubenswrapper[4636]: E1002 22:50:19.619485 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:50:34 crc kubenswrapper[4636]: I1002 22:50:34.604363 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:50:34 crc kubenswrapper[4636]: E1002 22:50:34.605063 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:50:46 crc kubenswrapper[4636]: I1002 22:50:46.603365 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:50:46 crc kubenswrapper[4636]: E1002 22:50:46.604167 4636 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2l2mr_openshift-machine-config-operator(7a136ab0-a86b-4cf4-a332-8c569e1ca777)\"" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777"
Oct 02 22:51:00 crc kubenswrapper[4636]: I1002 22:51:00.603840 4636 scope.go:117] "RemoveContainer" containerID="a5ccdb2fbae0d6bdb7814c9dccefccfa30b5f2b9240d87c2e9a15616b8aa5719"
Oct 02 22:51:01 crc kubenswrapper[4636]: I1002 22:51:01.623018 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"ca5d43c6a716adae026f97b86ecd81f4f71ceb6956565ad3d106fe4f305f3698"}
event={"ID":"7a136ab0-a86b-4cf4-a332-8c569e1ca777","Type":"ContainerStarted","Data":"ca5d43c6a716adae026f97b86ecd81f4f71ceb6956565ad3d106fe4f305f3698"} Oct 02 22:52:07 crc kubenswrapper[4636]: I1002 22:52:07.275471 4636 generic.go:334] "Generic (PLEG): container finished" podID="b6333460-ee9d-4e83-b3b3-4ec81848af13" containerID="07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759" exitCode=0 Oct 02 22:52:07 crc kubenswrapper[4636]: I1002 22:52:07.275524 4636 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" event={"ID":"b6333460-ee9d-4e83-b3b3-4ec81848af13","Type":"ContainerDied","Data":"07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759"} Oct 02 22:52:07 crc kubenswrapper[4636]: I1002 22:52:07.277218 4636 scope.go:117] "RemoveContainer" containerID="07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759" Oct 02 22:52:07 crc kubenswrapper[4636]: I1002 22:52:07.394663 4636 scope.go:117] "RemoveContainer" containerID="be9fc0b6de7cfd4084897d526469bf5ad509c5f01f68361ef3d844da90f20e91" Oct 02 22:52:08 crc kubenswrapper[4636]: I1002 22:52:08.213305 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zc6x_must-gather-6ms5s_b6333460-ee9d-4e83-b3b3-4ec81848af13/gather/0.log" Oct 02 22:52:17 crc kubenswrapper[4636]: I1002 22:52:17.523417 4636 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zc6x/must-gather-6ms5s"] Oct 02 22:52:17 crc kubenswrapper[4636]: I1002 22:52:17.524462 4636 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" podUID="b6333460-ee9d-4e83-b3b3-4ec81848af13" containerName="copy" containerID="cri-o://99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479" gracePeriod=2 Oct 02 22:52:17 crc kubenswrapper[4636]: I1002 22:52:17.551349 4636 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zc6x/must-gather-6ms5s"] Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.076075 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zc6x_must-gather-6ms5s_b6333460-ee9d-4e83-b3b3-4ec81848af13/copy/0.log" Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.080157 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/must-gather-6ms5s" Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.146356 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b6333460-ee9d-4e83-b3b3-4ec81848af13-must-gather-output\") pod \"b6333460-ee9d-4e83-b3b3-4ec81848af13\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.146530 4636 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n9dv\" (UniqueName: \"kubernetes.io/projected/b6333460-ee9d-4e83-b3b3-4ec81848af13-kube-api-access-4n9dv\") pod \"b6333460-ee9d-4e83-b3b3-4ec81848af13\" (UID: \"b6333460-ee9d-4e83-b3b3-4ec81848af13\") " Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.161391 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6333460-ee9d-4e83-b3b3-4ec81848af13-kube-api-access-4n9dv" (OuterVolumeSpecName: "kube-api-access-4n9dv") pod "b6333460-ee9d-4e83-b3b3-4ec81848af13" (UID: "b6333460-ee9d-4e83-b3b3-4ec81848af13"). 
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.249014 4636 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n9dv\" (UniqueName: \"kubernetes.io/projected/b6333460-ee9d-4e83-b3b3-4ec81848af13-kube-api-access-4n9dv\") on node \"crc\" DevicePath \"\""
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.337675 4636 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6333460-ee9d-4e83-b3b3-4ec81848af13-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "b6333460-ee9d-4e83-b3b3-4ec81848af13" (UID: "b6333460-ee9d-4e83-b3b3-4ec81848af13"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.350443 4636 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b6333460-ee9d-4e83-b3b3-4ec81848af13-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.367983 4636 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zc6x_must-gather-6ms5s_b6333460-ee9d-4e83-b3b3-4ec81848af13/copy/0.log"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.369024 4636 generic.go:334] "Generic (PLEG): container finished" podID="b6333460-ee9d-4e83-b3b3-4ec81848af13" containerID="99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479" exitCode=143
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.369085 4636 scope.go:117] "RemoveContainer" containerID="99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.369231 4636 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zc6x/must-gather-6ms5s"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.396131 4636 scope.go:117] "RemoveContainer" containerID="07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.447570 4636 scope.go:117] "RemoveContainer" containerID="99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479"
Oct 02 22:52:18 crc kubenswrapper[4636]: E1002 22:52:18.448229 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479\": container with ID starting with 99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479 not found: ID does not exist" containerID="99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.448262 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479"} err="failed to get container status \"99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479\": rpc error: code = NotFound desc = could not find container \"99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479\": container with ID starting with 99a491fdf3217593fb1e4aeab80700d99dfbe88bc80cc3a2d72980d2d6692479 not found: ID does not exist"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.448281 4636 scope.go:117] "RemoveContainer" containerID="07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759"
Oct 02 22:52:18 crc kubenswrapper[4636]: E1002 22:52:18.448492 4636 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759\": container with ID starting with 07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759 not found: ID does not exist" containerID="07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759"
Oct 02 22:52:18 crc kubenswrapper[4636]: I1002 22:52:18.448515 4636 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759"} err="failed to get container status \"07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759\": rpc error: code = NotFound desc = could not find container \"07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759\": container with ID starting with 07c9ab54a828b4865b95deb3ffece931eb8976ed6067971224d92abca128c759 not found: ID does not exist"
Oct 02 22:52:19 crc kubenswrapper[4636]: I1002 22:52:19.627785 4636 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6333460-ee9d-4e83-b3b3-4ec81848af13" path="/var/lib/kubelet/pods/b6333460-ee9d-4e83-b3b3-4ec81848af13/volumes"
Oct 02 22:53:23 crc kubenswrapper[4636]: I1002 22:53:23.117465 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 02 22:53:23 crc kubenswrapper[4636]: I1002 22:53:23.118198 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:53:53 crc kubenswrapper[4636]: I1002 22:53:53.117733 4636 patch_prober.go:28] interesting pod/machine-config-daemon-2l2mr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 22:53:53 crc kubenswrapper[4636]: I1002 22:53:53.118371 4636 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2l2mr" podUID="7a136ab0-a86b-4cf4-a332-8c569e1ca777" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 22:54:07 crc kubenswrapper[4636]: I1002 22:54:07.477216 4636 scope.go:117] "RemoveContainer" containerID="d72ffee355f21b17617a6ac89061980a23386a3f708171d52b3a3bf12f1b9002" Oct 02 22:54:07 crc kubenswrapper[4636]: I1002 22:54:07.502041 4636 scope.go:117] "RemoveContainer" containerID="89790e68db690bb3671809b9ad88daa37998ad2c55e97b52a0d6bdb6821c8add" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067601243024451 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067601244017367 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067566065016525 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067566065015475 5ustar corecore